AOT-biomaps 2.9.138__py3-none-any.whl → 2.9.279__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of AOT-biomaps might be problematic.

Files changed (31)
  1. AOT_biomaps/AOT_Acoustic/AcousticTools.py +35 -115
  2. AOT_biomaps/AOT_Acoustic/StructuredWave.py +2 -2
  3. AOT_biomaps/AOT_Acoustic/_mainAcoustic.py +22 -18
  4. AOT_biomaps/AOT_Experiment/Tomography.py +74 -4
  5. AOT_biomaps/AOT_Experiment/_mainExperiment.py +102 -68
  6. AOT_biomaps/AOT_Optic/_mainOptic.py +124 -58
  7. AOT_biomaps/AOT_Recon/AOT_Optimizers/DEPIERRO.py +72 -108
  8. AOT_biomaps/AOT_Recon/AOT_Optimizers/LS.py +474 -289
  9. AOT_biomaps/AOT_Recon/AOT_Optimizers/MAPEM.py +173 -68
  10. AOT_biomaps/AOT_Recon/AOT_Optimizers/MLEM.py +360 -154
  11. AOT_biomaps/AOT_Recon/AOT_Optimizers/PDHG.py +150 -111
  12. AOT_biomaps/AOT_Recon/AOT_PotentialFunctions/RelativeDifferences.py +10 -14
  13. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/SparseSMatrix_CSR.py +281 -0
  14. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/SparseSMatrix_SELL.py +328 -0
  15. AOT_biomaps/AOT_Recon/AOT_SparseSMatrix/__init__.py +2 -0
  16. AOT_biomaps/AOT_Recon/AOT_biomaps_kernels.cubin +0 -0
  17. AOT_biomaps/AOT_Recon/AlgebraicRecon.py +359 -238
  18. AOT_biomaps/AOT_Recon/AnalyticRecon.py +29 -41
  19. AOT_biomaps/AOT_Recon/BayesianRecon.py +165 -91
  20. AOT_biomaps/AOT_Recon/DeepLearningRecon.py +4 -1
  21. AOT_biomaps/AOT_Recon/PrimalDualRecon.py +175 -31
  22. AOT_biomaps/AOT_Recon/ReconEnums.py +38 -3
  23. AOT_biomaps/AOT_Recon/ReconTools.py +184 -77
  24. AOT_biomaps/AOT_Recon/__init__.py +1 -0
  25. AOT_biomaps/AOT_Recon/_mainRecon.py +144 -74
  26. AOT_biomaps/__init__.py +4 -36
  27. {aot_biomaps-2.9.138.dist-info → aot_biomaps-2.9.279.dist-info}/METADATA +2 -1
  28. aot_biomaps-2.9.279.dist-info/RECORD +47 -0
  29. aot_biomaps-2.9.138.dist-info/RECORD +0 -43
  30. {aot_biomaps-2.9.138.dist-info → aot_biomaps-2.9.279.dist-info}/WHEEL +0 -0
  31. {aot_biomaps-2.9.138.dist-info → aot_biomaps-2.9.279.dist-info}/top_level.txt +0 -0
@@ -21,6 +21,9 @@ class AnalyticRecon(Recon):
             self._analyticReconPython(withTumor)
         else:
             raise ValueError(f"Unknown analytic reconstruction type: {processType}")
+
+    def checkExistingFile(self, date = None):
+        raise NotImplementedError("checkExistingFile method is not implemented yet.")
 
     def _analyticReconPython(self, withTumor):
         """
@@ -48,56 +51,41 @@ class AnalyticRecon(Recon):
     def _iFourierRecon(self, AOsignal):
         """
         Image reconstruction using the inverse Fourier transform.
-
-        :param AOsignal: Signal in the time domain.
+        :param AOsignal: Signal in the time domain (shape: N_t, N_theta).
         :return: Reconstructed image in the spatial domain.
         """
-        # Signal in the frequency domain (FFT along the time axis)
-        s_tilde = np.fft.fft(AOsignal, axis=0)
-
-        theta = np.array([af.angle for af in self.experiment.AcousticFields])  # angles (N_theta,)
-        f_s = np.array([af.f_s for af in self.experiment.AcousticFields])  # spatial freqs (N_theta,)
-        f_t = np.fft.fftfreq(AOsignal.shape[0], d=self.experiment.dt)  # temporal freqs
-
+        theta = np.array([af.angle for af in self.experiment.AcousticFields])
+        f_s = np.array([af.f_s for af in self.experiment.AcousticFields])
+        dt = self.experiment.dt
+        f_t = np.fft.fftfreq(AOsignal.shape[0], d=dt)  # temporal frequencies
         x = self.experiment.OpticImage.laser.x
         z = self.experiment.OpticImage.laser.z
-        X, Z = np.meshgrid(x, z, indexing='ij')  # shape (Nx, Nz)
-
-        N_theta = len(theta)
-        I_rec = np.zeros((len(x), len(z)), dtype=complex)
-
-        for i, th in enumerate(trange(N_theta, desc="AOT-BioMaps -- Analytic Recontruction Tomography : iFourier (Processing projection) ---- processing on single CPU ----")):
-            fs = f_s[i]
-
-            # Project the coordinates into the rotated frame
-            x_prime = X * np.cos(th) + Z * np.sin(th)
-            z_prime = -X * np.sin(th) + Z * np.cos(th)
+        X, Z = np.meshgrid(x, z, indexing='ij')  # spatial grid (Nx, Nz)
 
-            # Spectral signal for this angle (1D, one value per f_t)
-            s_angle = s_tilde[:, i]  # shape (len(f_t),)
+        # Fourier transform of the signal
+        s_tilde = np.fft.fft(AOsignal, axis=0)  # shape: (N_t, N_theta)
 
-            # 2D frequency grid
-            F_t, F_s = np.meshgrid(f_t, [fs], indexing='ij')  # F_t: (len(f_t), 1), F_s: (1, 1)
-
-            # Phase: exp(2iπ(x' f_s + z' f_t)) = (x_prime * f_s + z_prime * f_t)
-            phase = 2j * np.pi * (x_prime[:, :, None] * fs + z_prime[:, :, None] * f_t[None, None, :])
-
-            # reshape s_angle to (len(f_t), 1, 1)
-            s_angle = s_angle[:, None, None]
-
-            # Contribution of this angle
-            integrand = s_angle * np.exp(phase)
-
-            # Integration over f_t (discrete sum)
-            I_theta = np.sum(integrand, axis=0)
-
-            # Add to the reconstruction
-            I_rec += I_theta
-
-        I_rec /= N_theta
+        # Initialize the reconstructed image
+        I_rec = np.zeros((len(x), len(z)), dtype=complex)
 
+        # Loop over the angles
+        for i, th in enumerate(trange(len(theta), desc="AOT-BioMaps -- iFourier Reconstruction")):
+            # Rotated coordinates
+            X_prime = X * np.cos(th) + Z * np.sin(th)
+            Z_prime = -X * np.sin(th) + Z * np.cos(th)
+
+            # For each temporal frequency f_t[j]
+            for j in range(len(f_t)):
+                # Phase: exp(2jπ (X_prime * f_s[i] + Z_prime * f_t[j]))
+                phase = 2j * np.pi * (X_prime * f_s[i] + Z_prime * f_t[j])
+                # Contribution of this frequency
+                I_rec += s_tilde[j, i] * np.exp(phase) * dt  # weighted by dt for the integration
+
+        # Normalization
+        I_rec /= len(theta)
         return np.abs(I_rec)
 
+
     def _iRadonRecon(self, AOsignal):
         """
         Image reconstruction using the iRadon method.
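Note on the hunk above: the new `_iFourierRecon` body trades the earlier per-angle broadcast accumulation for an explicit double loop over angles and temporal frequencies. As a point of reference only, here is a minimal, self-contained sketch (a standalone function on synthetic inputs, not part of the package API) of the same back-projection formula; it reads the angle explicitly as theta[i] and collapses the inner frequency loop into a single broadcast sum, which is the same accumulation of s_tilde(f_t, theta) * exp(2iπ(x' f_s + z' f_t)) * dt.

import numpy as np

def ifourier_backproject(AOsignal, theta, f_s, dt, x, z):
    """Toy inverse-Fourier back-projection mirroring the formula used in _iFourierRecon.

    AOsignal : (N_t, N_theta) time-domain signal, one column per insonification angle.
    theta    : (N_theta,) angles in radians.
    f_s      : (N_theta,) spatial frequencies of the structured acoustic fields.
    dt       : temporal sampling step.
    x, z     : 1D spatial axes of the reconstruction grid.
    """
    X, Z = np.meshgrid(x, z, indexing='ij')         # (Nx, Nz) spatial grid
    s_tilde = np.fft.fft(AOsignal, axis=0)          # (N_t, N_theta) spectrum
    f_t = np.fft.fftfreq(AOsignal.shape[0], d=dt)   # temporal frequencies
    I_rec = np.zeros((len(x), len(z)), dtype=complex)

    for i in range(len(theta)):
        # Rotate the grid into the frame of the i-th acoustic field
        x_p = X * np.cos(theta[i]) + Z * np.sin(theta[i])
        z_p = -X * np.sin(theta[i]) + Z * np.cos(theta[i])
        # Sum over all temporal frequencies in one broadcast instead of an inner loop;
        # phase has shape (Nx, Nz, N_t) and is weighted by this angle's spectrum.
        phase = 2j * np.pi * (x_p[..., None] * f_s[i] + z_p[..., None] * f_t)
        I_rec += np.einsum('t,xzt->xz', s_tilde[:, i], np.exp(phase)) * dt

    return np.abs(I_rec / len(theta))

# Example on synthetic data (shapes only, not physically meaningful)
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    sig = rng.standard_normal((64, 8))
    img = ifourier_backproject(sig, theta=np.linspace(0, np.pi / 4, 8),
                               f_s=np.full(8, 1e3), dt=1e-6,
                               x=np.linspace(0, 0.02, 32), z=np.linspace(0, 0.03, 48))
    print(img.shape)  # (32, 48)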
@@ -1,11 +1,13 @@
 from AOT_biomaps.AOT_Recon.AlgebraicRecon import AlgebraicRecon
 from AOT_biomaps.AOT_Recon.ReconEnums import ReconType, OptimizerType, PotentialType, ProcessType
 from .ReconTools import check_gpu_memory, calculate_memory_requirement
-from .AOT_Optimizers import MAPEM, DEPIERRO
+from .AOT_Optimizers import MAPEM, MAPEM_STOP, DEPIERRO
 from AOT_biomaps.Config import config
 
 import warnings
 import numpy as np
+import os
+from datetime import datetime
 
 class BayesianRecon(AlgebraicRecon):
     """
@@ -36,7 +38,146 @@ class BayesianRecon(AlgebraicRecon):
         if not isinstance(self.potentialFunction, PotentialType):
             raise TypeError(f"Potential functions must be of type PotentialType, got {type(self.potentialFunction)}")
 
-    def run(self, processType=ProcessType.PYTHON, withTumor=True):
+    def checkExistingFile(self, date = None):
+        """
+        Check if the reconstruction file already exists, based on current instance parameters.
+
+        Args:
+            withTumor (bool): If True, checks the phantom file; otherwise, checks the laser file.
+            overwrite (bool): If False, returns False if the file exists.
+
+        Returns:
+            tuple: (bool: whether to save, str: the filepath)
+        """
+        if self.saveDir is None:
+            raise ValueError("Save directory is not specified.")
+
+        # Build the file path
+        if date is None:
+            date = datetime.now().strftime("%d%m")
+
+        opt_name = self.optimizer.value
+        pot_name = self.potentialFunction.value
+        dir_name = f'results_{date}_{opt_name}_{pot_name}'
+
+        if self.optimizer == OptimizerType.PPGMLEM:
+            dir_name += f'_Beta_{self.beta}_Delta_{self.delta}_Gamma_{self.gamma}_Sigma_{self.sigma}'
+        elif self.optimizer in (OptimizerType.PGC, OptimizerType.DEPIERRO95):
+            dir_name += f'_Beta_{self.beta}_Sigma_{self.sigma}'
+
+        results_dir = os.path.join(self.saveDir, dir_name)
+        if not os.path.exists(results_dir):
+            os.makedirs(results_dir)
+
+        if os.path.exists(os.path.join(results_dir, "indices.npy")):
+            return (True, results_dir)
+
+        return (False, results_dir)
+
+    def load(self, withTumor=True, results_date=None, optimizer=None, potential_function=None, filePath=None, show_logs=True):
+        """
+        Load the reconstruction results and indices as lists of 2D np arrays for Bayesian reconstruction and store them in self.
+        If the loaded file is a 3D array, it is split into a list of 2D arrays.
+        """
+        if filePath is not None:
+            # Direct loading from a file
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = filePath
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            # Load the file (3D array or list of 2D arrays)
+            data = np.load(recon_path, allow_pickle=True)
+            # Split into a list of 2D arrays if it is a 3D array
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                # Otherwise, assume it is already a list of 2D arrays
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Try to load the indices
+            base_dir, _ = os.path.split(recon_path)
+            indices_path = os.path.join(base_dir, 'indices.npy')
+            if os.path.exists(indices_path):
+                indices_data = np.load(indices_path, allow_pickle=True)
+                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+                else:
+                    self.indices = indices_data
+            else:
+                self.indices = None
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {recon_path}")
+        else:
+            # Loading from the results directory
+            if self.saveDir is None:
+                raise ValueError("Save directory is not specified. Please set saveDir before loading.")
+            # Use current optimizer and potential function if not provided
+            opt_name = optimizer.value if optimizer is not None else self.optimizer.value
+            pot_name = potential_function.value if potential_function is not None else self.potentialFunction.value
+            # Build the base directory pattern
+            dir_pattern = f'results_*_{opt_name}_{pot_name}'
+            # Add parameters to the pattern based on the optimizer
+            if optimizer is None:
+                optimizer = self.optimizer
+            if optimizer == OptimizerType.PPGMLEM:
+                beta_str = f'_Beta_{self.beta}'
+                delta_str = f'_Delta_{self.delta}'
+                gamma_str = f'_Gamma_{self.gamma}'
+                sigma_str = f'_Sigma_{self.sigma}'
+                dir_pattern += f'{beta_str}{delta_str}{gamma_str}{sigma_str}'
+            elif optimizer in (OptimizerType.PGC, OptimizerType.DEPIERRO95):
+                beta_str = f'_Beta_{self.beta}'
+                sigma_str = f'_Sigma_{self.sigma}'
+                dir_pattern += f'{beta_str}{sigma_str}'
+            # Find the most recent results directory if no date is specified
+            if results_date is None:
+                dirs = [d for d in os.listdir(self.saveDir) if os.path.isdir(os.path.join(self.saveDir, d)) and dir_pattern in d]
+                if not dirs:
+                    raise FileNotFoundError(f"No matching results directory found for pattern '{dir_pattern}' in {self.saveDir}.")
+                dirs.sort(reverse=True)  # Most recent first
+                results_dir = os.path.join(self.saveDir, dirs[0])
+            else:
+                results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}_{pot_name}')
+                if optimizer == OptimizerType.PPGMLEM:
+                    results_dir += f'_Beta_{self.beta}_Delta_{self.delta}_Gamma_{self.gamma}_Sigma_{self.sigma}'
+                elif optimizer in (OptimizerType.PGC, OptimizerType.DEPIERRO95):
+                    results_dir += f'_Beta_{self.beta}_Sigma_{self.sigma}'
+                if not os.path.exists(results_dir):
+                    raise FileNotFoundError(f"Directory {results_dir} does not exist.")
+            # Load reconstruction results
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = os.path.join(results_dir, f'{recon_key}.npy')
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            data = np.load(recon_path, allow_pickle=True)
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Load saved indices as list of 2D arrays
+            indices_path = os.path.join(results_dir, 'indices.npy')
+            if not os.path.exists(indices_path):
+                raise FileNotFoundError(f"No indices file found at {indices_path}.")
+            indices_data = np.load(indices_path, allow_pickle=True)
+            if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+            else:
+                self.indices = indices_data
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {results_dir}")
+
+    def run(self, processType=ProcessType.PYTHON, withTumor=True, show_logs=True):
         """
         This method is a placeholder for the Bayesian reconstruction process.
         It currently does not perform any operations but serves as a template for future implementations.
@@ -48,109 +189,42 @@ class BayesianRecon(AlgebraicRecon):
         else:
             raise ValueError(f"Unknown Bayesian reconstruction type: {processType}")
 
-    def _bayesianReconCASToR(self, withTumor):
+    def _bayesianReconCASToR(self, show_logs, withTumor):
         raise NotImplementedError("CASToR Bayesian reconstruction is not implemented yet.")
 
-    def _bayesianReconPython(self, withTumor):
-
+    def _bayesianReconPython(self, show_logs, withTumor):
         if withTumor:
             if self.experiment.AOsignal_withTumor is None:
                 raise ValueError("AO signal with tumor is not available. Please generate AO signal with tumor the experiment first in the experiment object.")
             if self.optimizer.value == OptimizerType.PPGMLEM.value:
-                self.reconPhantom = self._MAPEM_STOP(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor)
+                self.reconPhantom, self.indices = MAPEM_STOP(
+                    SMatrix=self.SMatrix,
+                    y=self.experiment.AOsignal_withTumor,
+                    Omega=self.potentialFunction,
+                    beta=self.beta,
+                    delta=self.delta,
+                    gamma=self.gamma,
+                    sigma=self.sigma,
+                    numIterations=self.numIterations,
+                    isSavingEachIteration=self.isSavingEachIteration,
+                    withTumor=withTumor,
+                    device=self.device,
+                    max_saves=5000,
+                    show_logs=True)
             elif self.optimizer.value == OptimizerType.PGC.value:
-                self.reconPhantom = self._MAPEM(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor)
+                self.reconPhantom, self.indices = MAPEM(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor, show_logs=show_logs)
             elif self.optimizer.value == OptimizerType.DEPIERRO95.value:
-                self.reconPhantom = self._DEPIERRO(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor)
+                self.reconPhantom, self.indices = DEPIERRO(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor, show_logs=show_logs)
             else:
                 raise ValueError(f"Unknown optimizer type: {self.optimizer.value}")
         else:
             if self.experiment.AOsignal_withoutTumor is None:
                 raise ValueError("AO signal without tumor is not available. Please generate AO signal without tumor the experiment first in the experiment object.")
             if self.optimizer.value == OptimizerType.PPGMLEM.value:
-                self.reconLaser = self._MAPEM_STOP(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor)
+                self.reconLaser, self.indices = MAPEM_STOP(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor, show_logs=show_logs)
             elif self.optimizer.value == OptimizerType.PGC.value:
-                self.reconLaser = self._MAPEM(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor)
+                self.reconLaser, self.indices = MAPEM(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor, show_logs=show_logs)
             elif self.optimizer.value == OptimizerType.DEPIERRO95.value:
-                self.reconLaser = self._DEPIERRO(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor)
+                self.reconLaser, self.indices = DEPIERRO(SMatrix=self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor, show_logs=show_logs)
             else:
                 raise ValueError(f"Unknown optimizer type: {self.optimizer.value}")
-
-    def _MAPEM_STOP(self, SMatrix, y, withTumor):
-        """
-        This method implements the MAPEM_STOP algorithm using either CPU or single-GPU PyTorch acceleration.
-        Multi-GPU and Multi-CPU modes are not implemented for this algorithm.
-        """
-        result = None
-        required_memory = calculate_memory_requirement(SMatrix, y)
-
-        if self.isGPU:
-            if check_gpu_memory(config.select_best_gpu(), required_memory):
-                try:
-                    result = MAPEM._MAPEM_GPU_STOP(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, delta=self.delta, gamma=self.gamma, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                except Exception as e:
-                    warnings.warn(f"Falling back to CPU implementation due to an error in GPU implementation: {e}")
-            else:
-                warnings.warn("Insufficient GPU memory for single GPU MAPEM_STOP. Falling back to CPU.")
-
-        if result is None:
-            try:
-                result = MAPEM._MAPEM_CPU_STOP(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, delta=self.delta, gamma=self.gamma, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-            except Exception as e:
-                warnings.warn(f"An error occurred in CPU implementation: {e}")
-                result = None
-
-        return result
-
-    def _MAPEM(self, SMatrix, y, withTumor):
-        """
-        This method implements the MAPEM algorithm using either CPU or single-GPU PyTorch acceleration.
-        Multi-GPU and Multi-CPU modes are not implemented for this algorithm.
-        """
-        result = None
-        required_memory = calculate_memory_requirement(SMatrix, y)
-
-        if self.isGPU:
-            if check_gpu_memory(config.select_best_gpu(), required_memory):
-                try:
-                    result = MAPEM._MAPEM_GPU(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, delta=self.delta, gamma=self.gamma, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                except Exception as e:
-                    warnings.warn(f"Falling back to CPU implementation due to an error in GPU implementation: {e}")
-            else:
-                warnings.warn("Insufficient GPU memory for single GPU MAPEM. Falling back to CPU.")
-
-        if result is None:
-            try:
-                result = MAPEM._MAPEM_CPU(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, delta=self.delta, gamma=self.gamma, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-            except Exception as e:
-                warnings.warn(f"An error occurred in CPU implementation: {e}")
-                result = None
-
-        return result
-
-    def _DEPIERRO(self, SMatrix, y, withTumor):
-        """
-        This method implements the DEPIERRO algorithm using either CPU or single-GPU PyTorch acceleration.
-        Multi-GPU and Multi-CPU modes are not implemented for this algorithm.
-        """
-        result = None
-        required_memory = calculate_memory_requirement(SMatrix, y)
-
-        if self.isGPU:
-            if check_gpu_memory(config.select_best_gpu(), required_memory):
-                try:
-                    result = DEPIERRO._DEPIERRO_GPU(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                except Exception as e:
-                    warnings.warn(f"Falling back to CPU implementation due to an error in GPU implementation: {e}")
-            else:
-                warnings.warn("Insufficient GPU memory for single GPU DEPIERRO. Falling back to CPU.")
-
-        if result is None:
-            try:
-                result = DEPIERRO._DEPIERRO_CPU(SMatrix=SMatrix, y=y, Omega=self.potentialFunction, numIterations=self.numIterations, beta=self.beta, sigma=self.sigma, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-            except Exception as e:
-                warnings.warn(f"An error occurred in CPU implementation: {e}")
-                result = None
-
-        return result
-
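Taken together, the new checkExistingFile and load methods and the extra show_logs flag on run suggest a compute-or-reload workflow keyed on the optimizer and potential-function hyper-parameters. The sketch below is a hedged illustration of that workflow, not code shipped in the package: the constructor keywords, the experiment object, and the PotentialType member name are assumptions, while the checkExistingFile/load/run calls and the import paths follow this diff.

# Hedged usage sketch of the BayesianRecon API added in 2.9.279. Constructor
# keywords, the experiment argument, and PotentialType.RELATIVE_DIFFERENCES are
# assumptions; checkExistingFile / load / run mirror the methods in the diff.
from AOT_biomaps.AOT_Recon.BayesianRecon import BayesianRecon
from AOT_biomaps.AOT_Recon.ReconEnums import OptimizerType, PotentialType, ProcessType

def reconstruct_or_reload(experiment, save_dir):
    recon = BayesianRecon(
        experiment=experiment,                                  # assumed: a prepared AOT_Experiment object
        optimizer=OptimizerType.PPGMLEM,
        potentialFunction=PotentialType.RELATIVE_DIFFERENCES,   # assumed enum member name
        saveDir=save_dir,
    )
    done, results_dir = recon.checkExistingFile()  # True when indices.npy already exists in results_dir
    if done:
        recon.load(withTumor=True)                 # restores reconPhantom and indices from saveDir
    else:
        recon.run(processType=ProcessType.PYTHON, withTumor=True, show_logs=True)
    return recon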
@@ -29,4 +29,7 @@ class DeepLearningRecon(Recon):
         pass
 
     def _deepLearningReconPython(self):
-        pass
+        pass
+
+    def checkExistingFile(self, date = None):
+        raise NotImplementedError("checkExistingFile method is not implemented yet.")
@@ -1,19 +1,23 @@
 from AOT_biomaps.AOT_Recon.AlgebraicRecon import AlgebraicRecon
-from AOT_biomaps.AOT_Recon.ReconEnums import ReconType, ProcessType, NoiseType
-from AOT_biomaps.AOT_Recon.AOT_Optimizers.PDHG import chambolle_pock_TV_cpu, chambolle_pock_TV_gpu, chambolle_pock_KL_cpu, chambolle_pock_KL_gpu
+from AOT_biomaps.AOT_Recon.ReconEnums import ReconType, ProcessType
+from AOT_biomaps.AOT_Recon.AOT_Optimizers import CP_KL, CP_TV
+from AOT_biomaps.AOT_Recon.ReconEnums import OptimizerType
+
+import os
+from datetime import datetime
+import numpy as np
+import re
 
 class PrimalDualRecon(AlgebraicRecon):
     """
     This class implements the convex reconstruction process.
     It currently does not perform any operations but serves as a template for future implementations.
     """
-    def __init__(self, alpha, theta=1.0, L=None, noiseModel = NoiseType.GAUSSIAN, **kwargs):
+    def __init__(self, theta=1.0, L=None, **kwargs):
         super().__init__(**kwargs)
         self.reconType = ReconType.Convex
-        self.alpha = alpha  # regularization parameter
         self.theta = theta  # relaxation parameter (between 1 and 2)
         self.L = L  # spectral norm of the linear operator defined by the matrices P and P^T
-        self.noiseModel = noiseModel
 
     def run(self, processType=ProcessType.PYTHON, withTumor=True):
         """
@@ -28,37 +32,177 @@ class PrimalDualRecon(AlgebraicRecon):
         raise ValueError(f"Unknown convex reconstruction type: {processType}")
 
     def _convexReconCASToR(self, withTumor):
-        pass
+        raise NotImplementedError("CASToR convex reconstruction is not implemented yet.")
 
-    def _convexReconPython(self, withTumor):
-        if withTumor:
-            self.reconPhantom = self._chambolle_pock(self.SMatrix, y=self.experiment.AOsignal_withTumor, withTumor=withTumor)
-        else:
-            self.reconLaser = self._chambolle_pock(self.SMatrix, y=self.experiment.AOsignal_withoutTumor, withTumor=withTumor)
-
-    def _chambolle_pock(self, SMatrix, y, withTumor):
-        if self.isGPU:
-            try:
-                if self.noiseModel == NoiseType.GAUSSIAN:
-                    return chambolle_pock_TV_gpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                elif self.noiseModel == NoiseType.POISSON:
-                    return chambolle_pock_KL_gpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                else:
-                    raise ValueError(f"Noise model must be either GAUSSIAN or POISSON, got {self.noiseModel}")
-            except:
-                if self.noiseModel == NoiseType.GAUSSIAN:
-                    return chambolle_pock_TV_cpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                elif self.noiseModel == NoiseType.POISSON:
-                    return chambolle_pock_KL_cpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-                else:
-                    raise ValueError(f"Noise model must be either GAUSSIAN or POISSON, got {self.noiseModel}")
-        else:
-            if self.noiseModel == NoiseType.GAUSSIAN:
-                return chambolle_pock_TV_cpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-            elif self.noiseModel == NoiseType.POISSON:
-                return chambolle_pock_KL_cpu(SMatrix=SMatrix, y=y, alpha=self.alpha, theta=self.theta, numIterations=self.numIterations, L=self.L, isSavingEachIteration=self.isSavingEachIteration, withTumor=withTumor)
-            else:
-                raise ValueError(f"Noise model must be either GAUSSIAN or POISSON, got {self.noiseModel}")
+
+    def checkExistingFile(self, date = None):
+        """
+        Check if the file already exists, based on current instance parameters.
+        Returns:
+            tuple: (bool: whether to save, str: the filepath)
+        """
+        if date is None:
+            date = datetime.now().strftime("%d%m")
+        results_dir = os.path.join(
+            self.saveDir,
+            f'results_{date}_{self.optimizer.value}_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
+        )
+        os.makedirs(results_dir, exist_ok=True)
+
+        if os.path.exists(os.path.join(results_dir, "indices.npy")):
+            return (True, results_dir)
+
+        return (False, results_dir)
+
+    def load(self, withTumor=True, results_date=None, optimizer=None, filePath=None, show_logs=True):
+        """
+        Load the reconstruction results (reconPhantom or reconLaser) and indices as lists of 2D np arrays into self.
+        If the loaded file is a 3D array, it is split into a list of 2D arrays.
+        Args:
+            withTumor: If True, loads reconPhantom (with tumor), else reconLaser (without tumor).
+            results_date: Date string (format "ddmm") to specify which results to load. If None, uses the most recent date in saveDir.
+            optimizer: Optimizer name (as string or enum) to filter results. If None, uses the current optimizer of the instance.
+            filePath: Optional. If provided, loads directly from this path (overrides saveDir and results_date).
+        """
+        if filePath is not None:
+            # Direct loading from a file
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = filePath
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            # Load the data
+            data = np.load(recon_path, allow_pickle=True)
+            # Split into a list of 2D arrays if it is a 3D array
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                # Otherwise, assume it is already a list of 2D arrays
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Try to load the indices
+            base_dir, _ = os.path.split(recon_path)
+            indices_path = os.path.join(base_dir, "indices.npy")
+            if os.path.exists(indices_path):
+                indices_data = np.load(indices_path, allow_pickle=True)
+                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+                else:
+                    self.indices = indices_data
+            else:
+                self.indices = None
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {recon_path}")
+        else:
+            # Loading from the results directory
+            if self.saveDir is None:
+                raise ValueError("Save directory is not specified. Please set saveDir before loading.")
+            # Determine optimizer name for path matching
+            opt_name = optimizer.value if optimizer is not None else self.optimizer.value
+            # Find the most recent results directory if no date is specified
+            dir_pattern = f'results_*_{opt_name}'
+            if opt_name == OptimizerType.CP_TV.value or opt_name == OptimizerType.CP_KL.value:
+                dir_pattern += f'_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
+            if results_date is None:
+                dirs = [d for d in os.listdir(self.saveDir) if os.path.isdir(os.path.join(self.saveDir, d)) and dir_pattern in d]
+                if not dirs:
+                    raise FileNotFoundError(f"No matching results directory found for pattern '{dir_pattern}' in {self.saveDir}.")
+                dirs.sort(reverse=True)  # Most recent first
+                results_dir = os.path.join(self.saveDir, dirs[0])
+            else:
+                results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}')
+                if opt_name == OptimizerType.CP_TV.value or opt_name == OptimizerType.CP_KL.value:
+                    results_dir += f'_Alpha_{self.alpha}_Theta_{self.theta}_L_{self.L}'
+                if not os.path.exists(results_dir):
+                    raise FileNotFoundError(f"Directory {results_dir} does not exist.")
+            # Load reconstruction results
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = os.path.join(results_dir, f'{recon_key}.npy')
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            data = np.load(recon_path, allow_pickle=True)
+            # Split into a list of 2D arrays if it is a 3D array
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Try to load saved indices (if file exists)
+            indices_path = os.path.join(results_dir, 'indices.npy')
+            if os.path.exists(indices_path):
+                indices_data = np.load(indices_path, allow_pickle=True)
+                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+                else:
+                    self.indices = indices_data
+            else:
+                self.indices = None
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {results_dir}")
+
+    def _convexReconPython(self, withTumor):
+        if self.optimizer == OptimizerType.CP_TV:
+            if withTumor:
+                self.reconPhantom, self.indices = CP_TV(
+                    self.SMatrix,
+                    y=self.experiment.AOsignal_withTumor,
+                    alpha=self.alpha,
+                    theta=self.theta,
+                    numIterations=self.numIterations,
+                    isSavingEachIteration=self.isSavingEachIteration,
+                    L=self.L,
+                    withTumor=withTumor,
+                    device=None
+                )
+            else:
+                self.reconLaser, self.indices = CP_TV(
+                    self.SMatrix,
+                    y=self.experiment.AOsignal_withoutTumor,
+                    alpha=self.alpha,
+                    theta=self.theta,
+                    numIterations=self.numIterations,
+                    isSavingEachIteration=self.isSavingEachIteration,
+                    L=self.L,
+                    withTumor=withTumor,
+                    device=None
+                )
+        elif self.optimizer == OptimizerType.CP_KL:
+            if withTumor:
+                self.reconPhantom, self.indices = CP_KL(
+                    self.SMatrix,
+                    y=self.experiment.AOsignal_withTumor,
+                    alpha=self.alpha,
+                    theta=self.theta,
+                    numIterations=self.numIterations,
+                    isSavingEachIteration=self.isSavingEachIteration,
+                    L=self.L,
+                    withTumor=withTumor,
+                    device=None
+                )
+            else:
+                self.reconLaser, self.indices = CP_KL(
+                    self.SMatrix,
+                    y=self.experiment.AOsignal_withoutTumor,
+                    alpha=self.alpha,
+                    theta=self.theta,
+                    numIterations=self.numIterations,
+                    isSavingEachIteration=self.isSavingEachIteration,
+                    L=self.L,
+                    withTumor=withTumor,
+                    device=None
+                )
+        else:
+            raise ValueError(f"Optimizer value must be CP_TV or CP_KL, got {self.optimizer}")
+
 
 
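PrimalDualRecon now selects the Chambolle-Pock variant from self.optimizer (OptimizerType.CP_TV or OptimizerType.CP_KL) instead of a noiseModel argument, and alpha is no longer an __init__ parameter in this file even though self.alpha is still read by checkExistingFile and _convexReconPython. A hedged usage sketch under those observations (constructor keywords, the experiment argument, and setting alpha as a plain attribute are assumptions; the optimizer members, run signature, and checkExistingFile/load behaviour come from the diff):

# Hedged usage sketch for the reworked PrimalDualRecon (2.9.279). Constructor
# keywords and the experiment argument are assumptions; dispatching on
# OptimizerType.CP_TV / CP_KL and the checkExistingFile()/load()/run() calls follow the diff.
from AOT_biomaps.AOT_Recon.PrimalDualRecon import PrimalDualRecon
from AOT_biomaps.AOT_Recon.ReconEnums import OptimizerType, ProcessType

def chambolle_pock_reconstruct(experiment, save_dir, alpha=0.1):
    recon = PrimalDualRecon(
        theta=1.0,                       # relaxation parameter kept by __init__
        L=None,                          # spectral norm of the forward operator (None per the __init__ default)
        experiment=experiment,           # assumed keyword forwarded to AlgebraicRecon
        optimizer=OptimizerType.CP_TV,   # or OptimizerType.CP_KL for the KL data term
        saveDir=save_dir,
    )
    recon.alpha = alpha                  # assumption: regularization weight set directly, since it is still read as self.alpha
    done, _ = recon.checkExistingFile()  # True when indices.npy already exists in the results directory
    if done:
        recon.load(withTumor=True)
    else:
        recon.run(processType=ProcessType.PYTHON, withTumor=True)
    return recon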