AOT-biomaps 2.9.212__py3-none-any.whl → 2.9.233__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of AOT-biomaps might be problematic.
- AOT_biomaps/AOT_Experiment/Tomography.py +70 -0
- AOT_biomaps/AOT_Experiment/_mainExperiment.py +41 -22
- AOT_biomaps/AOT_Recon/AOT_Optimizers/DEPIERRO.py +48 -11
- AOT_biomaps/AOT_Recon/AOT_Optimizers/LS.py +9 -6
- AOT_biomaps/AOT_Recon/AOT_Optimizers/MAPEM.py +118 -38
- AOT_biomaps/AOT_Recon/AOT_Optimizers/MLEM.py +157 -86
- AOT_biomaps/AOT_Recon/AOT_PotentialFunctions/RelativeDifferences.py +10 -14
- AOT_biomaps/AOT_Recon/AlgebraicRecon.py +337 -185
- AOT_biomaps/AOT_Recon/BayesianRecon.py +33 -96
- AOT_biomaps/AOT_Recon/PrimalDualRecon.py +14 -18
- AOT_biomaps/AOT_Recon/ReconEnums.py +14 -0
- AOT_biomaps/AOT_Recon/ReconTools.py +4 -3
- AOT_biomaps/AOT_Recon/_mainRecon.py +3 -2
- AOT_biomaps/__init__.py +22 -1
- {aot_biomaps-2.9.212.dist-info → aot_biomaps-2.9.233.dist-info}/METADATA +1 -1
- {aot_biomaps-2.9.212.dist-info → aot_biomaps-2.9.233.dist-info}/RECORD +18 -18
- {aot_biomaps-2.9.212.dist-info → aot_biomaps-2.9.233.dist-info}/WHEEL +0 -0
- {aot_biomaps-2.9.212.dist-info → aot_biomaps-2.9.233.dist-info}/top_level.txt +0 -0
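For orientation, a minimal sketch of how the options added in this release might be exercised. The argument and enum names below are taken from the AlgebraicRecon.py diff that follows; the import paths and the experiment/saveDir keywords forwarded to the Recon base class are assumptions, so treat this as illustrative pseudocode rather than the package's documented API.

    from AOT_biomaps.AOT_Recon.AlgebraicRecon import AlgebraicRecon
    from AOT_biomaps.AOT_Recon.ReconEnums import OptimizerType, ProcessType, SparsingType

    recon = AlgebraicRecon(
        opti=OptimizerType.MLEM,
        numIterations=10000,
        denominatorThreshold=1e-6,     # new: forwarded to MLEM as denominator_threshold
        useSparseSMatrix=True,         # new: tells the optimizers to use a sparse system matrix
        sparseType=SparsingType.CSR,   # new: COO currently raises NotImplementedError
        sparseThreshold=0.1,           # new: threshold used when sparsifying the system matrix
        device=None,                   # new: forwarded to the MLEM optimizer
        experiment=experiment,         # assumed keyword accepted by the Recon base class
        saveDir="results",             # assumed keyword accepted by the Recon base class
    )
    recon.sparse_SMatrix()   # builds SMatrix_sparse; run() raises if it is missing when useSparseSMatrix=True
    recon.run(processType=ProcessType.PYTHON, withTumor=True, show_logs=False)  # new show_logs flag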
@@ -1,27 +1,27 @@
 from ._mainRecon import Recon
-from .ReconEnums import ReconType, OptimizerType, ProcessType
+from .ReconEnums import ReconType, OptimizerType, ProcessType, SparsingType
 from .AOT_Optimizers import MLEM, LS
-from .ReconTools import check_gpu_memory, calculate_memory_requirement, mse, load_recon
 from AOT_biomaps.Config import config
 
 import os
-import sys
 import subprocess
-import warnings
 import numpy as np
 import matplotlib.pyplot as plt
 import matplotlib.animation as animation
 from IPython.display import HTML
 from datetime import datetime
 from tempfile import gettempdir
-import
+import cupy as cp
+import cupyx.scipy.sparse as cpsparse
+import gc
+from tqdm import trange
 
 class AlgebraicRecon(Recon):
     """
     This class implements the Algebraic reconstruction process.
     It currently does not perform any operations but serves as a template for future implementations.
     """
-    def __init__(self, opti = OptimizerType.MLEM, numIterations = 10000, numSubsets = 1, isSavingEachIteration=True, maxSaves = 5000, alpha = None, **kwargs):
+    def __init__(self, opti = OptimizerType.MLEM, numIterations = 10000, numSubsets = 1, isSavingEachIteration=True, maxSaves = 5000, alpha = None, denominatorThreshold = 1e-6, useSparseSMatrix=True, sparseType = SparsingType.CSR, sparseThreshold=0.1, device = None, **kwargs):
         super().__init__(**kwargs)
         self.reconType = ReconType.Algebraic
         self.optimizer = opti
@@ -32,7 +32,14 @@ class AlgebraicRecon(Recon):
         self.numSubsets = numSubsets
         self.isSavingEachIteration = isSavingEachIteration
         self.maxSaves = maxSaves
+        self.denominatorThreshold = denominatorThreshold
         self.alpha = alpha # Regularization parameter for LS
+        self.device = device
+        self.SMatrix_sparse = None # Sparse system matrix
+        self.sparseThreshold = sparseThreshold
+        self.useSparseSMatrix = useSparseSMatrix # Whether to use sparse SMatrix in optimizers
+        self.sparseType = sparseType
+        self.Z_dim = None # Used for sparse matrix reconstruction
 
         if self.numIterations <= 0:
             raise ValueError("Number of iterations must be greater than 0.")
@@ -44,143 +51,29 @@ class AlgebraicRecon(Recon):
             raise TypeError("Number of subsets must be an integer.")
 
         print("Generating system matrix (processing acoustic fields)...")
-        self.SMatrix =
+        self.SMatrix = self._sparseSMatrix()
 
     # PUBLIC METHODS
 
-    def run(self, processType = ProcessType.PYTHON, withTumor= True):
+    def run(self, processType = ProcessType.PYTHON, withTumor= True, show_logs=True):
         """
         This method is a placeholder for the Algebraic reconstruction process.
         It currently does not perform any operations but serves as a template for future implementations.
         """
-
         if(processType == ProcessType.CASToR):
-            self._AlgebraicReconCASToR(withTumor)
+            self._AlgebraicReconCASToR(withTumor=withTumor, show_logs=show_logs)
         elif(processType == ProcessType.PYTHON):
-            self._AlgebraicReconPython(withTumor)
+            self._AlgebraicReconPython(withTumor=withTumor, show_logs=show_logs)
         else:
             raise ValueError(f"Unknown Algebraic reconstruction type: {processType}")
+
+    def sparse_SMatrix(self):
+        if self.sparseType == SparsingType.CSR:
+            self.SMatrix_sparse, self.Z_dim = self._sparseSMatrix_CSR(self.experiment.AcousticFields, threshold=self.sparseThreshold)
+        if self.sparseType == SparsingType.COO:
+            raise NotImplementedError("COO sparse matrix not implemented yet.")
 
-    def
-        # Détermine le dossier et le préfixe des fichiers
-        folder = 'results_withTumor' if withTumor else 'results_withoutTumor'
-        folder_path = os.path.join(self.saveDir, folder)
-
-        # Liste tous les fichiers .img dans le dossier
-        img_files = [
-            f for f in os.listdir(folder_path)
-            if f.endswith('.img') and f.startswith(folder)
-        ]
-
-        # Fonction pour extraire le numéro d'itération (ex: "it56" → 56)
-        def get_iteration(filename):
-            match = re.search(r'_it(\d+)\.img$', filename)
-            return int(match.group(1)) if match else float('inf') # Retourne l'infini pour les fichiers invalides (ils seront à la fin)
-
-        # Trie les fichiers par numéro d'itération (croissant)
-        sorted_files = sorted(img_files, key=get_iteration)
-
-        # Charge les données et remplit self.reconPhantom/self.reconLaser + self.indices
-        for file in sorted_files:
-            # Chemin complet du fichier .hdr correspondant
-            hdr_path = os.path.join(folder_path, file.replace('.img', '.hdr'))
-
-            # Vérifie que le .hdr existe avant de charger
-            if os.path.exists(hdr_path):
-                theta = load_recon(hdr_path)
-                iteration = get_iteration(file)
-
-                if iteration != float('inf'): # Ignore les fichiers mal formatés
-                    if withTumor:
-                        self.reconPhantom.append(theta)
-                    else:
-                        self.reconLaser.append(theta)
-                    self.indices.append(iteration)
-
-    def load_reconPython(self, withTumor=True, results_date=None, optimizer=None, filePath=None):
-        if filePath is not None:
-            # Mode chargement direct depuis un fichier
-            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
-            recon_path = filePath
-            if not os.path.exists(recon_path):
-                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
-            # Charge les données
-            data = np.load(recon_path, allow_pickle=True)
-            # Découpe en liste de 2D si c'est un tableau 3D
-            if isinstance(data, np.ndarray) and data.ndim == 3:
-                if withTumor:
-                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
-                else:
-                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
-            else:
-                # Si ce n'est pas un tableau 3D, on suppose que c'est déjà une liste de 2D
-                if withTumor:
-                    self.reconPhantom = data
-                else:
-                    self.reconLaser = data
-            # Essayer de charger les indices
-            base_dir, file_name = os.path.split(recon_path)
-            file_base, _ = os.path.splitext(file_name)
-            indices_path = os.path.join(base_dir, f"indices.npy")
-            if os.path.exists(indices_path):
-                indices_data = np.load(indices_path, allow_pickle=True)
-                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
-                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
-                else:
-                    self.indices = indices_data
-            else:
-                self.indices = None
-            print(f"Loaded reconstruction results and indices from {recon_path}")
-        else:
-            # Mode chargement depuis le répertoire de résultats
-            if self.saveDir is None:
-                raise ValueError("Save directory is not specified. Please set saveDir before loading.")
-            # Determine optimizer name for path matching
-            opt_name = optimizer.value if optimizer is not None else self.optimizer.value
-            # Find the most recent results directory if no date is specified
-            if results_date is None:
-                dirs = [
-                    d for d in os.listdir(self.saveDir)
-                    if os.path.isdir(os.path.join(self.saveDir, d))
-                    and re.match(r'results_\d{4}_' + re.escape(opt_name) + r'($|_)', d)
-                ]
-                if not dirs:
-                    raise FileNotFoundError(f"No results directory found for optimizer '{opt_name}' in {self.saveDir}.")
-                dirs.sort(reverse=True) # Most recent first
-                results_dir = os.path.join(self.saveDir, dirs[0])
-            else:
-                results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}')
-            if not os.path.exists(results_dir):
-                raise FileNotFoundError(f"Directory {results_dir} does not exist.")
-            # Load reconstruction results
-            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
-            recon_path = os.path.join(results_dir, f'{recon_key}.npy')
-            if not os.path.exists(recon_path):
-                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
-            data = np.load(recon_path, allow_pickle=True)
-            # Découpe en liste de 2D si c'est un tableau 3D
-            if isinstance(data, np.ndarray) and data.ndim == 3:
-                if withTumor:
-                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
-                else:
-                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
-            else:
-                if withTumor:
-                    self.reconPhantom = data
-                else:
-                    self.reconLaser = data
-            # Try to load saved indices (if file exists)
-            indices_path = os.path.join(results_dir, 'indices.npy')
-            if os.path.exists(indices_path):
-                indices_data = np.load(indices_path, allow_pickle=True)
-                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
-                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
-                else:
-                    self.indices = indices_data
-            else:
-                self.indices = None
-
-    def plot_MSE(self, isSaving=True, log_scale_x=False, log_scale_y=False):
+    def plot_MSE(self, isSaving=True, log_scale_x=False, log_scale_y=False, show_logs=True):
         """
         Plot the Mean Squared Error (MSE) of the reconstruction.
 
@@ -195,8 +88,8 @@ class AlgebraicRecon(Recon):
             raise ValueError("MSE is empty. Please calculate MSE first.")
 
         best_idx = self.indices[np.argmin(self.MSE)]
-
-
+        if show_logs:
+            print(f"Lowest MSE = {np.min(self.MSE):.4f} at iteration {best_idx+1}")
         # Plot MSE curve
         plt.figure(figsize=(7, 5))
         plt.plot(self.indices, self.MSE, 'r-', label="MSE curve")
@@ -225,17 +118,17 @@ class AlgebraicRecon(Recon):
                 scale_str = "_logy"
             SavingFolder = os.path.join(self.saveDir, f'{self.SMatrix.shape[3]}_SCANS_MSE_plot_{self.optimizer.name}_{scale_str}{date_str}.png')
             plt.savefig(SavingFolder, dpi=300)
-
+            if show_logs:
+                print(f"MSE plot saved to {SavingFolder}")
 
         plt.show()
 
-    def show_MSE_bestRecon(self, isSaving=True):
+    def show_MSE_bestRecon(self, isSaving=True, show_logs=True):
         if not self.MSE:
             raise ValueError("MSE is empty. Please calculate MSE first.")
 
 
         best_idx = np.argmin(self.MSE)
-        print(best_idx)
         best_recon = self.reconPhantom[best_idx]
 
         # Crée la figure et les axes
@@ -264,7 +157,6 @@ class AlgebraicRecon(Recon):
 
         # Right: Reconstruction at last iteration
         lastRecon = self.reconPhantom[-1]
-        print(lastRecon.shape)
         if self.experiment.OpticImage.phantom.shape != lastRecon.shape:
             lastRecon = lastRecon.T
         im2 = axs[2].imshow(lastRecon,
@@ -293,11 +185,12 @@ class AlgebraicRecon(Recon):
                 os.makedirs(savePath)
             SavingFolder = os.path.join(self.saveDir, f'{self.SMatrix.shape[3]}_SCANS_comparison_MSE_BestANDLastRecon_{self.optimizer.name}_{date_str}.png')
             plt.savefig(SavingFolder, dpi=300, bbox_inches='tight')
-
+            if show_logs:
+                print(f"MSE plot saved to {SavingFolder}")
 
         plt.show()
 
-    def show_theta_animation(self, vmin=None, vmax=None, total_duration_ms=3000, save_path=None, max_frames=1000, isPropMSE=True):
+    def show_theta_animation(self, vmin=None, vmax=None, total_duration_ms=3000, save_path=None, max_frames=1000, isPropMSE=True, show_logs=True):
         """
         Show theta iteration animation with speed proportional to MSE acceleration.
         In "propMSE" mode: slow down when MSE changes rapidly, speed up when MSE stagnates.
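The docstring above describes the "propMSE" pacing idea: the animation lingers where the MSE is still changing quickly and skips ahead where it has stagnated. A rough, self-contained illustration of that idea (not the package's actual frame-selection code; the helper name and weighting scheme are invented here):

    import numpy as np

    def frame_durations(mse, total_ms=3000, floor=1e-12):
        # Spend more of the total duration on frames where the MSE curve moves the most.
        mse = np.asarray(mse, dtype=float)
        change = np.abs(np.diff(mse, prepend=mse[0])) + floor  # per-iteration MSE change
        weights = change / change.sum()                        # normalise to 1
        return weights * total_ms                              # milliseconds per frame

    print(frame_durations([1.0, 0.5, 0.4, 0.39, 0.389]))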
@@ -400,18 +293,19 @@ class AlgebraicRecon(Recon):
                 ani.save(save_path, writer=animation.PillowWriter(fps=100))
             elif save_path.endswith(".mp4"):
                 ani.save(save_path, writer="ffmpeg", fps=30)
-
+            if show_logs:
+                print(f"Animation saved to {save_path}")
 
         plt.close(fig)
         return HTML(ani.to_jshtml())
 
-    def plot_SSIM(self, isSaving=True, log_scale_x=False, log_scale_y=False):
+    def plot_SSIM(self, isSaving=True, log_scale_x=False, log_scale_y=False, show_logs=True):
         if not self.SSIM:
             raise ValueError("SSIM is empty. Please calculate SSIM first.")
 
         best_idx = self.indices[np.argmax(self.SSIM)]
-
-
+        if show_logs:
+            print(f"Highest SSIM = {np.max(self.SSIM):.4f} at iteration {best_idx+1}")
         # Plot SSIM curve
         plt.figure(figsize=(7, 5))
         plt.plot(self.indices, self.SSIM, 'r-', label="SSIM curve")
@@ -440,11 +334,12 @@ class AlgebraicRecon(Recon):
                 scale_str = "_logy"
             SavingFolder = os.path.join(self.saveDir, f'{self.SMatrix.shape[3]}_SCANS_SSIM_plot_{self.optimizer.name}_{scale_str}{date_str}.png')
             plt.savefig(SavingFolder, dpi=300)
-
+            if show_logs:
+                print(f"SSIM plot saved to {SavingFolder}")
 
         plt.show()
 
-    def show_SSIM_bestRecon(self, isSaving=True):
+    def show_SSIM_bestRecon(self, isSaving=True, show_logs=True):
 
         if not self.SSIM:
             raise ValueError("SSIM is empty. Please calculate SSIM first.")
@@ -489,10 +384,11 @@ class AlgebraicRecon(Recon):
             date_str = now.strftime("%Y_%d_%m_%y")
             SavingFolder = os.path.join(self.saveDir, f'{self.SMatrix.shape[3]}_SCANS_comparison_SSIM_BestANDLastRecon_{self.optimizer.name}_{date_str}.png')
             plt.savefig(SavingFolder, dpi=300)
-
+            if show_logs:
+                print(f"SSIM plot saved to {SavingFolder}")
         plt.show()
 
-    def plot_CRC_vs_Noise(self, use_ROI=True, fin=None, isSaving=True):
+    def plot_CRC_vs_Noise(self, use_ROI=True, fin=None, isSaving=True, show_logs=True):
         """
         Plot CRC (Contrast Recovery Coefficient) vs Noise for each iteration.
         """
@@ -540,10 +436,11 @@ class AlgebraicRecon(Recon):
             date_str = now.strftime("%Y_%d_%m_%y")
             SavingFolder = os.path.join(self.saveDir, f'{self.SMatrix.shape[3]}_SCANS_CRCvsNOISE_{self.optimizer.name}_{date_str}.png')
             plt.savefig(SavingFolder, dpi=300)
-
+            if show_logs:
+                print(f"CRCvsNOISE plot saved to {SavingFolder}")
         plt.show()
 
-    def show_reconstruction_progress(self, start=0, fin=None, save_path=None, with_tumor=True):
+    def show_reconstruction_progress(self, start=0, fin=None, save_path=None, with_tumor=True, show_logs=True):
         """
         Show the reconstruction progress for either with or without tumor.
         If isPropMSE is True, the frame selection is adapted to MSE changes.
@@ -648,7 +545,8 @@ class AlgebraicRecon(Recon):
             else:
                 save_path = f"{save_path}_{title_suffix}"
             plt.savefig(save_path, dpi=300)
-
+            if show_logs:
+                print(f"Figure saved to: {save_path}")
 
         plt.show()
 
@@ -671,12 +569,12 @@ class AlgebraicRecon(Recon):
         if not os.path.exists(results_dir):
             os.makedirs(results_dir)
 
-        if os.path.exists(os.path.join(results_dir,"
+        if os.path.exists(os.path.join(results_dir,"indices.npy")):
             return (True, results_dir)
 
         return (False, results_dir)
 
-    def load(self,
+    def load(self, withTumor=True, results_date=None, optimizer=None, filePath=None, show_logs=True):
         """
         Load the reconstruction results (reconPhantom or reconLaser) and indices as lists of 2D np arrays into self.
         If the loaded file is a 3D array, it is split into a list of 2D arrays.
@@ -686,53 +584,301 @@ class AlgebraicRecon(Recon):
         optimizer: Optimizer name (as string or enum) to filter results. If None, uses the current optimizer of the instance.
         filePath: Optional. If provided, loads directly from this path (overrides saveDir and results_date).
         """
-
-
-
-
+        if filePath is not None:
+            # Mode chargement direct depuis un fichier
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = filePath
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            # Charge le fichier (3D ou liste de 2D)
+            data = np.load(recon_path, allow_pickle=True)
+            # Découpe en liste de 2D si c'est un tableau 3D
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                # Sinon, suppose que c'est déjà une liste de 2D
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Essayer de charger les indices
+            base_dir, _ = os.path.split(recon_path)
+            indices_path = os.path.join(base_dir, 'indices.npy')
+            if os.path.exists(indices_path):
+                indices_data = np.load(indices_path, allow_pickle=True)
+                if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                    self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+                else:
+                    self.indices = indices_data
+            else:
+                self.indices = None
+
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {recon_path}")
+        else:
+            # Mode chargement depuis le répertoire de résultats
+            if self.saveDir is None:
+                raise ValueError("Save directory is not specified. Please set saveDir before loading.")
+            # Use current optimizer and potential function if not provided
+            opt_name = optimizer.value if optimizer is not None else self.optimizer.value
+            # Build the base directory pattern
+            dir_pattern = f'results_*_{opt_name}'
+            # Add parameters to the pattern based on the optimizer
+            if optimizer is None:
+                optimizer = self.optimizer
+            if optimizer == OptimizerType.PPGMLEM:
+                beta_str = f'_Beta_{self.beta}'
+                delta_str = f'_Delta_{self.delta}'
+                gamma_str = f'_Gamma_{self.gamma}'
+                sigma_str = f'_Sigma_{self.sigma}'
+                dir_pattern += f'{beta_str}{delta_str}{gamma_str}{sigma_str}'
+            elif optimizer in (OptimizerType.PGC, OptimizerType.DEPIERRO95):
+                beta_str = f'_Beta_{self.beta}'
+                sigma_str = f'_Sigma_{self.sigma}'
+                dir_pattern += f'{beta_str}{sigma_str}'
+            # Find the most recent results directory if no date is specified
+            if results_date is None:
+                dirs = [d for d in os.listdir(self.saveDir) if os.path.isdir(os.path.join(self.saveDir, d)) and dir_pattern in d]
+                if not dirs:
+                    raise FileNotFoundError(f"No matching results directory found for pattern '{dir_pattern}' in {self.saveDir}.")
+                dirs.sort(reverse=True) # Most recent first
+                results_dir = os.path.join(self.saveDir, dirs[0])
+            else:
+                results_dir = os.path.join(self.saveDir, f'results_{results_date}_{opt_name}')
+                if optimizer == OptimizerType.MLEM:
+                    pass
+                elif optimizer == OptimizerType.LS:
+                    results_dir += f'_Alpha_{self.alpha}'
+            if not os.path.exists(results_dir):
+                raise FileNotFoundError(f"Directory {results_dir} does not exist.")
+            # Load reconstruction results
+            recon_key = 'reconPhantom' if withTumor else 'reconLaser'
+            recon_path = os.path.join(results_dir, f'{recon_key}.npy')
+            if not os.path.exists(recon_path):
+                raise FileNotFoundError(f"No reconstruction file found at {recon_path}.")
+            data = np.load(recon_path, allow_pickle=True)
+            if isinstance(data, np.ndarray) and data.ndim == 3:
+                if withTumor:
+                    self.reconPhantom = [data[i, :, :] for i in range(data.shape[0])]
+                else:
+                    self.reconLaser = [data[i, :, :] for i in range(data.shape[0])]
+            else:
+                if withTumor:
+                    self.reconPhantom = data
+                else:
+                    self.reconLaser = data
+            # Load saved indices as list of 2D arrays
+            indices_path = os.path.join(results_dir, 'indices.npy')
+            if not os.path.exists(indices_path):
+                raise FileNotFoundError(f"No indices file found at {indices_path}.")
+            indices_data = np.load(indices_path, allow_pickle=True)
+            if isinstance(indices_data, np.ndarray) and indices_data.ndim == 3:
+                self.indices = [indices_data[i, :, :] for i in range(indices_data.shape[0])]
+            else:
+                self.indices = indices_data
+            if show_logs:
+                print(f"Loaded reconstruction results and indices from {results_dir}")
+
     def normalizeSMatrix(self):
         self.SMatrix = self.SMatrix / (float(self.experiment.params.acoustic['voltage'])*float(self.experiment.params.acoustic['sensitivity']))
 
     # PRIVATE METHODS
 
-    def
+    def _sparseSMatrix_CSR(AcousticFields, threshold_factor=0.1, normalize=False):
+        """
+        Construit une matrice sparse CSR par morceaux sans concaténation intermédiaire.
+        Libère toute la mémoire temporaire à chaque étape.
+        """
+        device_index = config.select_best_gpu()
+        # Configuration GPU
+        cp.cuda.Device(device_index).use()
+        dtype = cp.float32
+        dtype_indices = cp.int32
+
+        # Mesure mémoire initiale
+        total_mem, free_mem = cp.cuda.Device(device_index).mem_info
+        initial_mem = total_mem - free_mem
+        print(f"VRAM initiale: {initial_mem / 1024**3:.3f} Go")
+
+        N = len(AcousticFields)
+        if N == 0:
+            raise ValueError("Aucun champ acoustique fourni.")
+
+        # Déterminer les dimensions
+        field = AcousticFields[0].field
+        if isinstance(field, np.ndarray):
+            field = cp.asarray(field, dtype=dtype)
+        T, Z, X = field.shape
+        TN, ZX = T * N, Z * X
+
+        # Création d'une matrice CSR vide
+        SMatrix = cpsparse.csr_matrix((TN, ZX), dtype=dtype)
+
+        try:
+            # Traitement field par field
+            for n in trange(N, desc="Sparsing fields", unit="field"):
+                field = AcousticFields[n].field
+                if isinstance(field, np.ndarray):
+                    field = cp.asarray(field, dtype=dtype)
+
+                # Liste pour stocker les données du field courant
+                field_rows = []
+                field_cols = []
+                field_values = []
+
+                for t in range(T):
+                    field_t = field[t]
+                    threshold = threshold_factor * cp.max(cp.abs(field_t))
+                    mask = cp.abs(field_t) > threshold
+
+                    if not cp.any(mask):
+                        continue
+
+                    z_idx, x_idx = cp.where(mask)
+                    rows = cp.full_like(z_idx, t * N + n, dtype=dtype_indices)
+                    cols = z_idx * X + x_idx
+                    vals = field_t[mask].astype(dtype)
+
+                    if normalize:
+                        max_val = cp.max(cp.abs(vals)) if vals.size > 0 else 1.0
+                        vals = vals / max_val if max_val > 0 else vals
+
+                    field_rows.append(rows)
+                    field_cols.append(cols)
+                    field_values.append(vals)
+
+                    # Libération immédiate
+                    del rows, cols, vals, z_idx, x_idx, mask, field_t
+                    cp.get_default_memory_pool().free_all_blocks()
+
+                # Si des données pour ce field
+                if field_rows:
+                    # Création d'une matrice COO temporaire pour ce field
+                    temp_rows = cp.concatenate(field_rows)
+                    temp_cols = cp.concatenate(field_cols)
+                    temp_values = cp.concatenate(field_values)
+
+                    # Création d'une matrice COO puis conversion en CSR
+                    field_matrix = cpsparse.coo_matrix(
+                        (temp_values, (temp_rows, temp_cols)),
+                        shape=(TN, ZX),
+                        dtype=dtype
+                    ).tocsr()
+
+                    # Ajout à la matrice globale
+                    SMatrix += field_matrix
+
+                    # Libération mémoire
+                    del field_rows, field_cols, field_values
+                    del temp_rows, temp_cols, temp_values, field_matrix
+                    cp.get_default_memory_pool().free_all_blocks()
+                    gc.collect()
+
+                # Libération du field
+                del field
+                cp.get_default_memory_pool().free_all_blocks()
+                gc.collect()
+
+            # Optimisation finale de la matrice
+            SMatrix.sum_duplicates()
+            SMatrix.eliminate_zeros()
+
+            # Calcul des métriques
+            nnz = SMatrix.nnz
+            density = nnz / (TN * ZX)
+            size_bytes = nnz * (4 + 4) # 4 octets pour int32 + 4 pour float32
+
+            print(f"Dimensions: {TN} x {ZX}, NNZ={nnz:,} (density={density:.2%}) using {size_bytes / 1024**3:.2f} Go of VRAM")
+
+            # Dernière libération avant le return
+            cp.get_default_memory_pool().free_all_blocks()
+            gc.collect()
+
+            return {
+                'DATA': SMatrix,
+                'density': density,
+                'size_bytes': size_bytes
+            }, Z
+
+        except Exception as e:
+            print(f"Erreur: {str(e)}")
+            del SMatrix
+            cp.get_default_memory_pool().free_all_blocks()
+            gc.collect()
+            raise
+
+
+    def _AlgebraicReconPython(self,withTumor, show_logs):
 
         if withTumor:
             if self.experiment.AOsignal_withTumor is None:
                 raise ValueError("AO signal with tumor is not available. Please generate AO signal with tumor the experiment first in the experiment object.")
-            else:
-                y = self.experiment.AOsignal_withTumor
         else:
             if self.experiment.AOsignal_withoutTumor is None:
                 raise ValueError("AO signal without tumor is not available. Please generate AO signal without tumor the experiment first in the experiment object.")
-
-
+
+        if self.useSparseSMatrix and self.SMatrix_sparse is None:
+            raise ValueError("Sparse SMatrix is not available. Please generate sparse SMatrix first.")
 
         if self.optimizer.value == OptimizerType.MLEM.value:
-
-
-
-
-
-
-
-
-
+            if withTumor:
+                self.reconPhantom, self.indices = MLEM(SMatrix=self.SMatrix,
+                                                       y=self.experiment.AOsignal_withTumor,
+                                                       numIterations=self.numIterations,
+                                                       isSavingEachIteration=self.isSavingEachIteration,
+                                                       withTumor=withTumor,
+                                                       device=self.device,
+                                                       use_numba=self.isMultiCPU,
+                                                       denominator_threshold=self.denominatorThreshold,
+                                                       max_saves=self.maxSaves,
+                                                       show_logs=show_logs,
+                                                       useSparseSMatrix=self.useSparseSMatrix,
+                                                       Z=self.Z_dim
+                                                       )
+            else:
+                self.reconLaser, self.indices = MLEM(SMatrix=self.SMatrix,
+                                                     y=self.experiment.AOsignal_withoutTumor,
+                                                     numIterations=self.numIterations,
+                                                     isSavingEachIteration=self.isSavingEachIteration,
+                                                     withTumor=withTumor,
+                                                     device=self.device,
+                                                     use_numba=self.isMultiCPU,
+                                                     denominator_threshold=self.denominatorThreshold,
+                                                     max_saves=self.maxSaves,
+                                                     show_logs=show_logs,
+                                                     useSparseSMatrix=self.useSparseSMatrix,
+                                                     Z=self.Z_dim
+                                                     )
         elif self.optimizer.value == OptimizerType.LS.value:
             if self.alpha is None:
                 raise ValueError("Alpha (regularization parameter) must be set for LS reconstruction.")
-
-
+            if withTumor:
+                self.reconPhantom, self.indices = LS(SMatrix=self.SMatrix,
+                                                     y=self.experiment.AOsignal_withTumor,
                                                      numIterations=self.numIterations,
                                                      isSavingEachIteration=self.isSavingEachIteration,
                                                      withTumor=withTumor,
                                                      alpha=self.alpha,
                                                      max_saves=self.maxSaves,
+                                                     show_logs=show_logs
+                                                     )
+            else:
+                self.reconLaser, self.indices = LS(SMatrix=self.SMatrix,
+                                                   y=self.experiment.AOsignal_withoutTumor,
+                                                   numIterations=self.numIterations,
+                                                   isSavingEachIteration=self.isSavingEachIteration,
+                                                   withTumor=withTumor,
+                                                   alpha=self.alpha,
+                                                   max_saves=self.maxSaves,
+                                                   show_logs=show_logs
                                                    )
         else:
             raise ValueError(f"Only MLEM and LS are supported for simple algebraic reconstruction. {self.optimizer.value} need Bayesian reconstruction")
 
-    def _AlgebraicReconCASToR(self,
+    def _AlgebraicReconCASToR(self,withTumor, show_logs):
         # Définir les chemins
         smatrix = os.path.join(self.saveDir, "system_matrix")
         if withTumor:
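The _sparseSMatrix_CSR helper added in the hunk above thresholds each time slice of every acoustic field on the GPU and accumulates the surviving entries into a single large CSR matrix, freeing temporaries as it goes. A rough CPU analogue of the per-field thresholding step, using NumPy/SciPy instead of CuPy (illustrative only; the shapes and helper name are made up):

    import numpy as np
    import scipy.sparse as sp

    def sparsify_field(field, n, N, threshold_factor=0.1):
        # Keep |value| > threshold_factor * max(|slice|) in each time slice t,
        # mapping entry (t, z, x) of field n to row t*N + n and column z*X + x.
        T, Z, X = field.shape
        rows, cols, vals = [], [], []
        for t in range(T):
            slice_t = field[t]
            mask = np.abs(slice_t) > threshold_factor * np.abs(slice_t).max()
            z_idx, x_idx = np.nonzero(mask)
            rows.append(np.full(z_idx.size, t * N + n))
            cols.append(z_idx * X + x_idx)
            vals.append(slice_t[mask])
        return sp.coo_matrix(
            (np.concatenate(vals), (np.concatenate(rows), np.concatenate(cols))),
            shape=(T * N, Z * X),
        ).tocsr()

    S = sparsify_field(np.random.rand(4, 8, 8), n=0, N=3)
    print(S.shape, S.nnz)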
@@ -742,14 +888,16 @@ class AlgebraicRecon(Recon):
 
         # Vérifier et générer les fichiers d'entrée si nécessaire
         if not os.path.isfile(os.path.join(self.saveDir, fileName)):
-
+            if show_logs:
+                print(f"Fichier .cdh manquant. Génération de {fileName}...")
             self.experiment.saveAOsignals_Castor(self.saveDir)
 
         # Vérifier/générer la matrice système
         if not os.path.isdir(smatrix):
             os.makedirs(smatrix, exist_ok=True)
         if not os.listdir(smatrix):
-
+            if show_logs:
+                print("Matrice système manquante. Génération...")
             self.experiment.saveAcousticFields(self.saveDir)
 
         # Vérifier que le fichier .cdh existe (redondant mais sûr)
@@ -788,8 +936,9 @@ class AlgebraicRecon(Recon):
         ]
 
         # Afficher la commande (pour débogage)
-
-
+        if show_logs:
+            print("Commande CASToR :")
+            print(" ".join(cmd))
 
         # Chemin du script temporaire
         recon_script_path = os.path.join(gettempdir(), 'recon.sh')
@@ -803,17 +952,20 @@ class AlgebraicRecon(Recon):
 
         # Rendre le script exécutable et l'exécuter
         subprocess.run(["chmod", "+x", recon_script_path], check=True)
-
+        if show_logs:
+            print(f"Exécution de la reconstruction avec CASToR...")
         result = subprocess.run(recon_script_path, env=env, check=True, capture_output=True, text=True)
 
         # Afficher la sortie de CASToR (pour débogage)
-
-
-
-
-
-
-
+        if show_logs:
+            print("Sortie CASToR :")
+            print(result.stdout)
+            if result.stderr:
+                print("Erreurs :")
+                print(result.stderr)
+
+        if show_logs:
+            print("Reconstruction terminée avec succès.")
         self.load_reconCASToR(withTumor=withTumor)
 
     # STATIC METHODS