pivtools-0.1.3-cp311-cp311-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (127)
  1. pivtools-0.1.3.dist-info/METADATA +222 -0
  2. pivtools-0.1.3.dist-info/RECORD +127 -0
  3. pivtools-0.1.3.dist-info/WHEEL +5 -0
  4. pivtools-0.1.3.dist-info/entry_points.txt +3 -0
  5. pivtools-0.1.3.dist-info/top_level.txt +3 -0
  6. pivtools_cli/__init__.py +5 -0
  7. pivtools_cli/_build_marker.c +25 -0
  8. pivtools_cli/_build_marker.cp311-win_amd64.pyd +0 -0
  9. pivtools_cli/cli.py +225 -0
  10. pivtools_cli/example.py +139 -0
  11. pivtools_cli/lib/PIV_2d_cross_correlate.c +334 -0
  12. pivtools_cli/lib/PIV_2d_cross_correlate.h +22 -0
  13. pivtools_cli/lib/common.h +36 -0
  14. pivtools_cli/lib/interp2custom.c +146 -0
  15. pivtools_cli/lib/interp2custom.h +48 -0
  16. pivtools_cli/lib/peak_locate_gsl.c +711 -0
  17. pivtools_cli/lib/peak_locate_gsl.h +40 -0
  18. pivtools_cli/lib/peak_locate_gsl_print.c +736 -0
  19. pivtools_cli/lib/peak_locate_lm.c +751 -0
  20. pivtools_cli/lib/peak_locate_lm.h +27 -0
  21. pivtools_cli/lib/xcorr.c +342 -0
  22. pivtools_cli/lib/xcorr.h +31 -0
  23. pivtools_cli/lib/xcorr_cache.c +78 -0
  24. pivtools_cli/lib/xcorr_cache.h +26 -0
  25. pivtools_cli/piv/interp2custom/interp2custom.py +69 -0
  26. pivtools_cli/piv/piv.py +240 -0
  27. pivtools_cli/piv/piv_backend/base.py +825 -0
  28. pivtools_cli/piv/piv_backend/cpu_instantaneous.py +1005 -0
  29. pivtools_cli/piv/piv_backend/factory.py +28 -0
  30. pivtools_cli/piv/piv_backend/gpu_instantaneous.py +15 -0
  31. pivtools_cli/piv/piv_backend/infilling.py +445 -0
  32. pivtools_cli/piv/piv_backend/outlier_detection.py +306 -0
  33. pivtools_cli/piv/piv_backend/profile_cpu_instantaneous.py +230 -0
  34. pivtools_cli/piv/piv_result.py +40 -0
  35. pivtools_cli/piv/save_results.py +342 -0
  36. pivtools_cli/piv_cluster/cluster.py +108 -0
  37. pivtools_cli/preprocessing/filters.py +399 -0
  38. pivtools_cli/preprocessing/preprocess.py +79 -0
  39. pivtools_cli/tests/helpers.py +107 -0
  40. pivtools_cli/tests/instantaneous_piv/test_piv_integration.py +167 -0
  41. pivtools_cli/tests/instantaneous_piv/test_piv_integration_multi.py +553 -0
  42. pivtools_cli/tests/preprocessing/test_filters.py +41 -0
  43. pivtools_core/__init__.py +5 -0
  44. pivtools_core/config.py +703 -0
  45. pivtools_core/config.yaml +135 -0
  46. pivtools_core/image_handling/__init__.py +0 -0
  47. pivtools_core/image_handling/load_images.py +464 -0
  48. pivtools_core/image_handling/readers/__init__.py +53 -0
  49. pivtools_core/image_handling/readers/generic_readers.py +50 -0
  50. pivtools_core/image_handling/readers/lavision_reader.py +190 -0
  51. pivtools_core/image_handling/readers/registry.py +24 -0
  52. pivtools_core/paths.py +49 -0
  53. pivtools_core/vector_loading.py +248 -0
  54. pivtools_gui/__init__.py +3 -0
  55. pivtools_gui/app.py +687 -0
  56. pivtools_gui/calibration/__init__.py +0 -0
  57. pivtools_gui/calibration/app/__init__.py +0 -0
  58. pivtools_gui/calibration/app/views.py +1186 -0
  59. pivtools_gui/calibration/calibration_planar/planar_calibration_production.py +570 -0
  60. pivtools_gui/calibration/vector_calibration_production.py +544 -0
  61. pivtools_gui/config.py +703 -0
  62. pivtools_gui/image_handling/__init__.py +0 -0
  63. pivtools_gui/image_handling/load_images.py +464 -0
  64. pivtools_gui/image_handling/readers/__init__.py +53 -0
  65. pivtools_gui/image_handling/readers/generic_readers.py +50 -0
  66. pivtools_gui/image_handling/readers/lavision_reader.py +190 -0
  67. pivtools_gui/image_handling/readers/registry.py +24 -0
  68. pivtools_gui/masking/__init__.py +0 -0
  69. pivtools_gui/masking/app/__init__.py +0 -0
  70. pivtools_gui/masking/app/views.py +123 -0
  71. pivtools_gui/paths.py +49 -0
  72. pivtools_gui/piv_runner.py +261 -0
  73. pivtools_gui/pivtools.py +58 -0
  74. pivtools_gui/plotting/__init__.py +0 -0
  75. pivtools_gui/plotting/app/__init__.py +0 -0
  76. pivtools_gui/plotting/app/views.py +1671 -0
  77. pivtools_gui/plotting/plot_maker.py +220 -0
  78. pivtools_gui/post_processing/POD/__init__.py +0 -0
  79. pivtools_gui/post_processing/POD/app/__init__.py +0 -0
  80. pivtools_gui/post_processing/POD/app/views.py +647 -0
  81. pivtools_gui/post_processing/POD/pod_decompose.py +979 -0
  82. pivtools_gui/post_processing/POD/views.py +1096 -0
  83. pivtools_gui/post_processing/__init__.py +0 -0
  84. pivtools_gui/static/404.html +1 -0
  85. pivtools_gui/static/_next/static/chunks/117-d5793c8e79de5511.js +2 -0
  86. pivtools_gui/static/_next/static/chunks/484-cfa8b9348ce4f00e.js +1 -0
  87. pivtools_gui/static/_next/static/chunks/869-320a6b9bdafbb6d3.js +1 -0
  88. pivtools_gui/static/_next/static/chunks/app/_not-found/page-12f067ceb7415e55.js +1 -0
  89. pivtools_gui/static/_next/static/chunks/app/layout-b907d5f31ac82e9d.js +1 -0
  90. pivtools_gui/static/_next/static/chunks/app/page-334cc4e8444cde2f.js +1 -0
  91. pivtools_gui/static/_next/static/chunks/fd9d1056-ad15f396ddf9b7e5.js +1 -0
  92. pivtools_gui/static/_next/static/chunks/framework-f66176bb897dc684.js +1 -0
  93. pivtools_gui/static/_next/static/chunks/main-a1b3ced4d5f6d998.js +1 -0
  94. pivtools_gui/static/_next/static/chunks/main-app-8a63c6f5e7baee11.js +1 -0
  95. pivtools_gui/static/_next/static/chunks/pages/_app-72b849fbd24ac258.js +1 -0
  96. pivtools_gui/static/_next/static/chunks/pages/_error-7ba65e1336b92748.js +1 -0
  97. pivtools_gui/static/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
  98. pivtools_gui/static/_next/static/chunks/webpack-4a8ca7c99e9bb3d8.js +1 -0
  99. pivtools_gui/static/_next/static/css/7d3f2337d7ea12a5.css +3 -0
  100. pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_buildManifest.js +1 -0
  101. pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_ssgManifest.js +1 -0
  102. pivtools_gui/static/file.svg +1 -0
  103. pivtools_gui/static/globe.svg +1 -0
  104. pivtools_gui/static/grid.svg +8 -0
  105. pivtools_gui/static/index.html +1 -0
  106. pivtools_gui/static/index.txt +8 -0
  107. pivtools_gui/static/next.svg +1 -0
  108. pivtools_gui/static/vercel.svg +1 -0
  109. pivtools_gui/static/window.svg +1 -0
  110. pivtools_gui/stereo_reconstruction/__init__.py +0 -0
  111. pivtools_gui/stereo_reconstruction/app/__init__.py +0 -0
  112. pivtools_gui/stereo_reconstruction/app/views.py +1985 -0
  113. pivtools_gui/stereo_reconstruction/stereo_calibration_production.py +606 -0
  114. pivtools_gui/stereo_reconstruction/stereo_reconstruction_production.py +544 -0
  115. pivtools_gui/utils.py +63 -0
  116. pivtools_gui/vector_loading.py +248 -0
  117. pivtools_gui/vector_merging/__init__.py +1 -0
  118. pivtools_gui/vector_merging/app/__init__.py +1 -0
  119. pivtools_gui/vector_merging/app/views.py +759 -0
  120. pivtools_gui/vector_statistics/app/__init__.py +1 -0
  121. pivtools_gui/vector_statistics/app/views.py +710 -0
  122. pivtools_gui/vector_statistics/ensemble_statistics.py +49 -0
  123. pivtools_gui/vector_statistics/instantaneous_statistics.py +311 -0
  124. pivtools_gui/video_maker/__init__.py +0 -0
  125. pivtools_gui/video_maker/app/__init__.py +0 -0
  126. pivtools_gui/video_maker/app/views.py +436 -0
  127. pivtools_gui/video_maker/video_maker.py +662 -0
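pivtools_gui/vector_statistics/app/views.py (new file, +710 lines; entry 121 above)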
@@ -0,0 +1,710 @@
+ """
+ Vector Statistics API views
+ Provides endpoints for computing instantaneous statistics (mean and Reynolds stresses)
+ with progress tracking.
+ """
+ import threading
+ import time
+ import uuid
+ from pathlib import Path
+
+ import dask
+ import dask.array as da
+ import matplotlib
+ import matplotlib.pyplot as plt
+ import numpy as np
+ import scipy.io
+ from flask import Blueprint, jsonify, request
+ from loguru import logger
+ from scipy.io import savemat
+
+ matplotlib.use("Agg")
+
+ from ...config import get_config
+ from ...paths import get_data_paths
+ from ...plotting.plot_maker import make_scalar_settings, plot_scalar_field
+ from ...utils import camera_number
+ from ...vector_loading import load_coords_from_directory, load_vectors_from_directory
+
+ statistics_bp = Blueprint("statistics", __name__)
+
+ # Global job tracking
+ statistics_jobs = {}
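+ # Jobs are tracked in this in-process dict keyed by UUID; worker threads update their
+ # own entries (status, progress, output file), so job state lives only in this process.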
+
+
+ def find_non_empty_runs_in_file(data_dir: Path, vector_format: str) -> list:
+     """
+     Find which runs have non-empty vector data by checking the first vector file.
+     Returns list of 1-based run numbers that contain valid data.
+     """
+     if not data_dir.exists():
+         return []
+
+     # Get first vector file to check run structure
+     first_file = data_dir / (vector_format % 1)
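+     # vector_format is a printf-style filename pattern (the config default used below is
+     # "%05d.mat"), so "vector_format % 1" resolves to the first vector file.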
+     if not first_file.exists():
+         return []
+
+     try:
+         mat = scipy.io.loadmat(str(first_file), struct_as_record=False, squeeze_me=True)
+         if "piv_result" not in mat:
+             return []
+
+         piv_result = mat["piv_result"]
+         valid_runs = []
+
+         if isinstance(piv_result, np.ndarray) and piv_result.dtype == object:
+             # Multiple runs
+             for run_idx in range(piv_result.size):
+                 pr = piv_result[run_idx]
+                 try:
+                     # Check if ux has valid data
+                     ux = np.asarray(getattr(pr, "ux", np.array([])))
+                     if ux.size > 0 and not np.all(np.isnan(ux)):
+                         valid_runs.append(run_idx + 1)  # Convert to 1-based
+                 except Exception:
+                     pass
+         else:
+             # Single run
+             try:
+                 ux = np.asarray(getattr(piv_result, "ux", np.array([])))
+                 if ux.size > 0 and not np.all(np.isnan(ux)):
+                     valid_runs.append(1)
+             except Exception:
+                 pass
+
+         return valid_runs
+     except Exception as e:
+         logger.error(f"Error checking runs in {first_file}: {e}")
+         return []
+
+
+ def compute_statistics_for_camera(
+     base_dir: Path,
+     camera: int,
+     use_merged: bool,
+     num_images: int,
+     type_name: str,
+     endpoint: str,
+     vector_format: str,
+     job_id: str,
+ ):
+     """
+     Compute instantaneous statistics for a single camera or merged data.
+     Updates job status in the statistics_jobs dictionary.
+     """
+     try:
+         cam_folder = "Merged" if use_merged else f"Cam{camera}"
+         logger.info(f"[Statistics] Starting for {cam_folder}, endpoint={endpoint}")
+
+         # Update job status
+         statistics_jobs[job_id]["status"] = "running"
+         statistics_jobs[job_id]["camera"] = cam_folder
+         statistics_jobs[job_id]["progress"] = 5
+
+         # Get paths - use cam (number) not cam_folder (string)
+         paths = get_data_paths(
+             base_dir=base_dir,
+             num_images=num_images,
+             cam=camera,
+             type_name=type_name,
+             endpoint=endpoint,
+             use_merged=use_merged,
+         )
+
+         data_dir = paths["data_dir"]
+         if not data_dir.exists():
+             raise FileNotFoundError(f"Data directory not found: {data_dir}")
+
+         statistics_jobs[job_id]["progress"] = 10
+
+         # Find non-empty runs
+         valid_runs = find_non_empty_runs_in_file(data_dir, vector_format)
+         if not valid_runs:
+             raise ValueError(f"No valid runs found in {data_dir}")
+
+         logger.info(f"[Statistics] Found {len(valid_runs)} valid runs: {valid_runs}")
+         statistics_jobs[job_id]["valid_runs"] = valid_runs
+         statistics_jobs[job_id]["progress"] = 15
+
+         # Create a minimal config object for loading vectors
+         class MinimalConfig:
+             def __init__(self, num_images, vector_format, piv_chunk_size=100):
+                 self.num_images = num_images
+                 self.vector_format = vector_format
+                 self.piv_chunk_size = piv_chunk_size
+
+         config = MinimalConfig(num_images, vector_format)
+
+         # Load and process each run separately (runs can't be stacked because grid sizes differ)
+         logger.info(f"[Statistics] Loading vectors from {data_dir} for runs {valid_runs}")
+         statistics_jobs[job_id]["progress"] = 20
+
+         # Load coordinates for all valid runs first
+         coords_x_list, coords_y_list = load_coords_from_directory(data_dir, runs=valid_runs)
+         statistics_jobs[job_id]["progress"] = 25
+
+         # Process each run independently
+         mean_ux_all = []
+         mean_uy_all = []
+         mean_uz_all = None  # Stereo-only; set to [] once stereo data is detected below
+         b_mask_all = []
+         uu_all = []
+         uv_all = []
+         vv_all = []
+         uw_all = None  # Stereo-only
+         vw_all = None  # Stereo-only
+         ww_all = None  # Stereo-only
+
+         stereo = None  # Determined from the first run
+
+         for run_idx, run_num in enumerate(valid_runs):
+             logger.info(f"[Statistics] Processing run {run_num} ({run_idx + 1}/{len(valid_runs)})")
+
+             # Load this run's data
+             arr_run = load_vectors_from_directory(data_dir, config, runs=[run_num])
+             # Shape: (N_files, 1, 3_or_4, H, W)
+             arr_run = arr_run[:, 0, :, :, :]  # (N_files, 3_or_4, H, W)
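+             # Component layout after dropping the singleton axis: channel 0 is ux and
+             # channel 1 is uy; planar data carries the validity mask in channel 2, while
+             # stereo data carries uz in channel 2 and the mask in channel 3 (see below).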
+
+             # Check for stereo on first run
+             if stereo is None:
+                 stereo = arr_run.shape[1] >= 4
+                 if stereo:
+                     logger.info("[Statistics] Detected stereo data (4 components)")
+                     mean_uz_all = []
+                     uw_all = []
+                     vw_all = []
+                     ww_all = []
+
+             # Extract components
+             ux = arr_run[:, 0, :, :]  # (N, H, W)
+             uy = arr_run[:, 1, :, :]
+             if stereo:
+                 uz = arr_run[:, 2, :, :]
+                 bmask = arr_run[:, 3, :, :]
+             else:
+                 bmask = arr_run[:, 2, :, :]
+
+             # Compute statistics for this run
+             mean_ux = ux.mean(axis=0)
+             mean_uy = uy.mean(axis=0)
+             b_mask = bmask[0]
+             E_ux2 = (ux**2).mean(axis=0)
+             E_uy2 = (uy**2).mean(axis=0)
+             E_uxuy = (ux * uy).mean(axis=0)
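+             # First and second raw moments over the image ensemble; the Reynolds stresses
+             # below follow from the usual identities, e.g. uu = <u^2> - <u>^2 and
+             # uv = <uv> - <u><v>.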
+
+             if stereo:
+                 mean_uz = uz.mean(axis=0)
+                 E_uz2 = (uz**2).mean(axis=0)
+                 E_uxuz = (ux * uz).mean(axis=0)
+                 E_uyuz = (uy * uz).mean(axis=0)
+
+                 # Compute all at once
+                 mean_ux_c, mean_uy_c, mean_uz_c, b_mask_c, E_ux2_c, E_uy2_c, E_uxuy_c, E_uz2_c, E_uxuz_c, E_uyuz_c = dask.compute(
+                     mean_ux, mean_uy, mean_uz, b_mask, E_ux2, E_uy2, E_uxuy, E_uz2, E_uxuz, E_uyuz
+                 )
+
+                 # Compute Reynolds stresses
+                 uu = E_ux2_c - mean_ux_c**2
+                 uv = E_uxuy_c - (mean_ux_c * mean_uy_c)
+                 vv = E_uy2_c - mean_uy_c**2
+                 uw = E_uxuz_c - (mean_ux_c * mean_uz_c)
+                 vw = E_uyuz_c - (mean_uy_c * mean_uz_c)
+                 ww = E_uz2_c - mean_uz_c**2
+
+                 mean_uz_all.append(mean_uz_c)
+                 uw_all.append(uw)
+                 vw_all.append(vw)
+                 ww_all.append(ww)
+             else:
+                 # Compute all at once
+                 mean_ux_c, mean_uy_c, b_mask_c, E_ux2_c, E_uy2_c, E_uxuy_c = dask.compute(
+                     mean_ux, mean_uy, b_mask, E_ux2, E_uy2, E_uxuy
+                 )
+
+                 # Compute Reynolds stresses
+                 uu = E_ux2_c - mean_ux_c**2
+                 uv = E_uxuy_c - (mean_ux_c * mean_uy_c)
+                 vv = E_uy2_c - mean_uy_c**2
+
+             # Store results
+             mean_ux_all.append(mean_ux_c)
+             mean_uy_all.append(mean_uy_c)
+             b_mask_all.append(b_mask_c)
+             uu_all.append(uu)
+             uv_all.append(uv)
+             vv_all.append(vv)
+
+             # Update progress
+             progress = 30 + int((run_idx + 1) / len(valid_runs) * 45)  # 30-75%
+             statistics_jobs[job_id]["progress"] = progress
+
+         logger.info("[Statistics] Completed statistics computation for all runs")
+         statistics_jobs[job_id]["progress"] = 75
+
+         # Create output directory
+         stats_dir = paths["stats_dir"]
+         stats_dir.mkdir(parents=True, exist_ok=True)
+         mean_stats_dir = stats_dir / "mean_stats"
+         mean_stats_dir.mkdir(parents=True, exist_ok=True)
+
+         # Get config for plotting
+         cfg = get_config()
+         plot_extension = getattr(cfg, "plot_save_extension", ".png")
+         save_pickle = getattr(cfg, "plot_save_pickle", False)
+
+         # Generate plots for each run
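+         # NOTE: the per-run plotting below is commented out in this release; only the
+         # mean_stats.mat file is written further down.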
+         # logger.info("[Statistics] Generating plots")
+         # statistics_jobs[job_id]["progress"] = 80
+
+         # for idx, run_label in enumerate(valid_runs):
+         #     mask_bool = np.asarray(b_mask_all[idx]).astype(bool)
+         #     cx = coords_x_list[idx] if idx < len(coords_x_list) else None
+         #     cy = coords_y_list[idx] if idx < len(coords_y_list) else None
+
+         #     # Plot mean velocities (ux, uy, uz if stereo)
+         #     save_base_ux = mean_stats_dir / f"ux_{run_label}"
+         #     settings_ux = make_scalar_settings(
+         #         cfg,
+         #         variable="ux",
+         #         run_label=run_label,
+         #         save_basepath=save_base_ux,
+         #         variable_units="m/s",
+         #         coords_x=cx,
+         #         coords_y=cy,
+         #     )
+         #     fig_ux, _, _ = plot_scalar_field(mean_ux_all[idx], mask_bool, settings_ux)
+         #     fig_ux.savefig(f"{save_base_ux}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #     if save_pickle:
+         #         import pickle
+         #         with open(f"{save_base_ux}.pkl", "wb") as f:
+         #             pickle.dump(fig_ux, f)
+         #     plt.close(fig_ux)
+
+         #     save_base_uy = mean_stats_dir / f"uy_{run_label}"
+         #     settings_uy = make_scalar_settings(
+         #         cfg,
+         #         variable="uy",
+         #         run_label=run_label,
+         #         save_basepath=save_base_uy,
+         #         variable_units="m/s",
+         #         coords_x=cx,
+         #         coords_y=cy,
+         #     )
+         #     fig_uy, _, _ = plot_scalar_field(mean_uy_all[idx], mask_bool, settings_uy)
+         #     fig_uy.savefig(f"{save_base_uy}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #     if save_pickle:
+         #         import pickle
+         #         with open(f"{save_base_uy}.pkl", "wb") as f:
+         #             pickle.dump(fig_uy, f)
+         #     plt.close(fig_uy)
+
+         #     if stereo:
+         #         save_base_uz = mean_stats_dir / f"uz_{run_label}"
+         #         settings_uz = make_scalar_settings(
+         #             cfg,
+         #             variable="uz",
+         #             run_label=run_label,
+         #             save_basepath=save_base_uz,
+         #             variable_units="m/s",
+         #             coords_x=cx,
+         #             coords_y=cy,
+         #         )
+         #         fig_uz, _, _ = plot_scalar_field(mean_uz_all[idx], mask_bool, settings_uz)
+         #         fig_uz.savefig(f"{save_base_uz}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #         if save_pickle:
+         #             import pickle
+         #             with open(f"{save_base_uz}.pkl", "wb") as f:
+         #                 pickle.dump(fig_uz, f)
+         #         plt.close(fig_uz)
+
+         #     # Plot Reynolds stresses (uu, uv, vv, and uw, vw, ww if stereo)
+         #     save_base_uu = mean_stats_dir / f"uu_{run_label}"
+         #     settings_uu = make_scalar_settings(
+         #         cfg,
+         #         variable="uu",
+         #         run_label=run_label,
+         #         save_basepath=save_base_uu,
+         #         variable_units="m²/s²",
+         #         coords_x=cx,
+         #         coords_y=cy,
+         #     )
+         #     fig_uu, _, _ = plot_scalar_field(uu_all[idx], mask_bool, settings_uu)
+         #     fig_uu.savefig(f"{save_base_uu}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #     if save_pickle:
+         #         import pickle
+         #         with open(f"{save_base_uu}.pkl", "wb") as f:
+         #             pickle.dump(fig_uu, f)
+         #     plt.close(fig_uu)
+
+         #     save_base_uv = mean_stats_dir / f"uv_{run_label}"
+         #     settings_uv = make_scalar_settings(
+         #         cfg,
+         #         variable="uv",
+         #         run_label=run_label,
+         #         save_basepath=save_base_uv,
+         #         variable_units="m²/s²",
+         #         coords_x=cx,
+         #         coords_y=cy,
+         #     )
+         #     fig_uv, _, _ = plot_scalar_field(uv_all[idx], mask_bool, settings_uv)
+         #     fig_uv.savefig(f"{save_base_uv}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #     if save_pickle:
+         #         import pickle
+         #         with open(f"{save_base_uv}.pkl", "wb") as f:
+         #             pickle.dump(fig_uv, f)
+         #     plt.close(fig_uv)
+
+         #     save_base_vv = mean_stats_dir / f"vv_{run_label}"
+         #     settings_vv = make_scalar_settings(
+         #         cfg,
+         #         variable="vv",
+         #         run_label=run_label,
+         #         save_basepath=save_base_vv,
+         #         variable_units="m²/s²",
+         #         coords_x=cx,
+         #         coords_y=cy,
+         #     )
+         #     fig_vv, _, _ = plot_scalar_field(vv_all[idx], mask_bool, settings_vv)
+         #     fig_vv.savefig(f"{save_base_vv}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #     if save_pickle:
+         #         import pickle
+         #         with open(f"{save_base_vv}.pkl", "wb") as f:
+         #             pickle.dump(fig_vv, f)
+         #     plt.close(fig_vv)
+
+         #     if stereo:
+         #         save_base_uw = mean_stats_dir / f"uw_{run_label}"
+         #         settings_uw = make_scalar_settings(
+         #             cfg,
+         #             variable="uw",
+         #             run_label=run_label,
+         #             save_basepath=save_base_uw,
+         #             variable_units="m²/s²",
+         #             coords_x=cx,
+         #             coords_y=cy,
+         #         )
+         #         fig_uw, _, _ = plot_scalar_field(uw_all[idx], mask_bool, settings_uw)
+         #         fig_uw.savefig(f"{save_base_uw}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #         if save_pickle:
+         #             import pickle
+         #             with open(f"{save_base_uw}.pkl", "wb") as f:
+         #                 pickle.dump(fig_uw, f)
+         #         plt.close(fig_uw)
+
+         #         save_base_vw = mean_stats_dir / f"vw_{run_label}"
+         #         settings_vw = make_scalar_settings(
+         #             cfg,
+         #             variable="vw",
+         #             run_label=run_label,
+         #             save_basepath=save_base_vw,
+         #             variable_units="m²/s²",
+         #             coords_x=cx,
+         #             coords_y=cy,
+         #         )
+         #         fig_vw, _, _ = plot_scalar_field(vw_all[idx], mask_bool, settings_vw)
+         #         fig_vw.savefig(f"{save_base_vw}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #         if save_pickle:
+         #             import pickle
+         #             with open(f"{save_base_vw}.pkl", "wb") as f:
+         #                 pickle.dump(fig_vw, f)
+         #         plt.close(fig_vw)
+
+         #         save_base_ww = mean_stats_dir / f"ww_{run_label}"
+         #         settings_ww = make_scalar_settings(
+         #             cfg,
+         #             variable="ww",
+         #             run_label=run_label,
+         #             save_basepath=save_base_ww,
+         #             variable_units="m²/s²",
+         #             coords_x=cx,
+         #             coords_y=cy,
+         #         )
+         #         fig_ww, _, _ = plot_scalar_field(ww_all[idx], mask_bool, settings_ww)
+         #         fig_ww.savefig(f"{save_base_ww}{plot_extension}", dpi=1200, bbox_inches="tight")
+         #         if save_pickle:
+         #             import pickle
+         #             with open(f"{save_base_ww}.pkl", "wb") as f:
+         #                 pickle.dump(fig_ww, f)
+         #         plt.close(fig_ww)
+
+         statistics_jobs[job_id]["progress"] = 85
+
+         # Build piv_result structured array
+         logger.info("[Statistics] Building piv_result structure")
+         n_passes = len(valid_runs)
+         dt_fields = [
+             ("ux", object),
+             ("uy", object),
+             ("b_mask", object),
+             ("uu", object),
+             ("uv", object),
+             ("vv", object),
+         ]
+         if stereo:
+             dt_fields.extend([
+                 ("uz", object),
+                 ("uw", object),
+                 ("vw", object),
+                 ("ww", object),
+             ])
+
+         dt = np.dtype(dt_fields)
+         # Create piv_result array with size = max run number (preserving run positions)
+         max_run = max(valid_runs)
+         piv_result = np.empty((max_run,), dtype=dt)
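+         # Object-dtype fields let each run hold an arbitrarily sized array; when written
+         # with savemat below, this structured array should round-trip as a MATLAB struct
+         # array indexed by run, and sizing it by max(valid_runs) keeps run positions aligned.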
+
+         # Initialize all positions with empty arrays (for runs that don't exist)
+         for i in range(max_run):
+             piv_result["ux"][i] = np.array([])
+             piv_result["uy"][i] = np.array([])
+             piv_result["b_mask"][i] = np.array([])
+             piv_result["uu"][i] = np.array([])
+             piv_result["uv"][i] = np.array([])
+             piv_result["vv"][i] = np.array([])
+             if stereo:
+                 piv_result["uz"][i] = np.array([])
+                 piv_result["uw"][i] = np.array([])
+                 piv_result["vw"][i] = np.array([])
+                 piv_result["ww"][i] = np.array([])
+
+         # Fill piv_result only at positions corresponding to valid runs
+         # This preserves run positions (e.g., if valid_runs=[3,4], indices 0,1 stay empty, 2,3 get data)
+         for list_idx, run_num in enumerate(valid_runs):
+             piv_idx = run_num - 1  # Convert run number to 0-based index
+             piv_result["ux"][piv_idx] = mean_ux_all[list_idx]
+             piv_result["uy"][piv_idx] = mean_uy_all[list_idx]
+             piv_result["b_mask"][piv_idx] = b_mask_all[list_idx]
+             piv_result["uu"][piv_idx] = uu_all[list_idx]
+             piv_result["uv"][piv_idx] = uv_all[list_idx]
+             piv_result["vv"][piv_idx] = vv_all[list_idx]
+             if stereo:
+                 piv_result["uz"][piv_idx] = mean_uz_all[list_idx]
+                 piv_result["uw"][piv_idx] = uw_all[list_idx]
+                 piv_result["vw"][piv_idx] = vw_all[list_idx]
+                 piv_result["ww"][piv_idx] = ww_all[list_idx]
+
+         # Build coordinates structure (also preserving run positions)
+         dt_coords = np.dtype([("x", object), ("y", object)])
+         coordinates = np.empty((max_run,), dtype=dt_coords)
+
+         # Initialize all positions with empty arrays
+         for i in range(max_run):
+             coordinates["x"][i] = np.array([])
+             coordinates["y"][i] = np.array([])
+
+         # Fill only valid run positions
+         for list_idx, run_num in enumerate(valid_runs):
+             piv_idx = run_num - 1  # Convert run number to 0-based index
+             coordinates["x"][piv_idx] = coords_x_list[list_idx]
+             coordinates["y"][piv_idx] = coords_y_list[list_idx]
+
+         statistics_jobs[job_id]["progress"] = 95
+
+         # Save results in the format expected by the plotting system
+         out_file = mean_stats_dir / "mean_stats.mat"
+         logger.info(f"[Statistics] Saving results to {out_file}")
+
+         meta_dict = {
+             "endpoint": endpoint,
+             "use_merged": use_merged,
+             "camera": cam_folder,
+             "selected_passes": valid_runs,
+             "n_passes": int(n_passes),
+             "stereo": stereo,
+             "definitions": "ux=<u>, uy=<v>, uu=<u'^2>, uv=<u'v'>, vv=<v'^2>"
+             + (", uz=<w>, uw=<u'w'>, vw=<v'w'>, ww=<w'^2>" if stereo else ""),
+         }
+
+         # Save piv_result, coordinates, and meta in the same file
+         savemat(
+             out_file,
+             {
+                 "piv_result": piv_result,
+                 "coordinates": coordinates,
+                 "meta": meta_dict,
+             }
+         )
+
+         statistics_jobs[job_id]["progress"] = 100
+         statistics_jobs[job_id]["status"] = "completed"
+         statistics_jobs[job_id]["output_file"] = str(out_file)
+         logger.info(f"[Statistics] Completed successfully for {cam_folder}")
+
+     except Exception as e:
+         logger.error(f"[Statistics] Error: {e}", exc_info=True)
+         statistics_jobs[job_id]["status"] = "failed"
+         statistics_jobs[job_id]["error"] = str(e)
+
+
+ @statistics_bp.route("/statistics/calculate", methods=["POST"])
+ def calculate_statistics():
+     """
+     Start a statistics calculation job.
+     Expects JSON with: base_path_idx, cameras (list), include_merged (bool),
+     image_count, type_name, endpoint
+     """
+     data = request.get_json() or {}
+     logger.info(f"Received statistics calculation request: {data}")
+     base_path_idx = int(data.get("base_path_idx", 0))
+     cameras = data.get("cameras", [])  # List of camera numbers
+     include_merged = bool(data.get("include_merged", False))
+     image_count = int(data.get("image_count", 1000))
+     type_name = data.get("type_name", "instantaneous")
+     endpoint = data.get("endpoint", "")
+
+     try:
+         cfg = get_config()
+         base_paths = getattr(cfg, "base_paths", getattr(cfg, "source_paths", []))
+         if not base_paths or base_path_idx >= len(base_paths):
+             return jsonify({"error": "Invalid base_path_idx"}), 400
+
+         base_dir = Path(base_paths[base_path_idx])
+         vector_format = getattr(cfg, "vector_format", "%05d.mat")
+
+         # Create a parent job to track all sub-jobs
+         parent_job_id = str(uuid.uuid4())
+         sub_jobs = []
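+         # One sub-job is created per selected camera, plus an optional "merged" job; each
+         # runs compute_statistics_for_camera on its own daemon thread while the parent job
+         # only aggregates their status.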
+
+         # Check whether merged data exists, if requested
+         if include_merged:
+             # For merged data we still need to pass a camera number (use the first camera)
+             # but with the use_merged=True flag
+             first_cam = cameras[0] if cameras else 1
+             merged_paths = get_data_paths(
+                 base_dir=base_dir,
+                 num_images=image_count,
+                 cam=first_cam,
+                 type_name=type_name,
+                 endpoint=endpoint,
+                 use_merged=True,
+             )
+             if merged_paths["data_dir"].exists():
+                 job_id = str(uuid.uuid4())
+                 sub_jobs.append({"job_id": job_id, "type": "merged"})
+                 statistics_jobs[job_id] = {
+                     "status": "starting",
+                     "progress": 0,
+                     "start_time": time.time(),
+                     "camera": "Merged",
+                     "parent_job_id": parent_job_id,
+                 }
+
+                 # Start thread for merged data - pass the first camera number
+                 thread = threading.Thread(
+                     target=compute_statistics_for_camera,
+                     args=(
+                         base_dir,
+                         first_cam,  # Use first camera number
+                         True,  # use_merged
+                         image_count,
+                         type_name,
+                         endpoint,
+                         vector_format,
+                         job_id,
+                     ),
+                 )
+                 thread.daemon = True
+                 thread.start()
+             else:
+                 logger.warning(f"Merged data directory not found: {merged_paths['data_dir']}")
+
+         # Start jobs for each camera
+         for cam in cameras:
+             cam_num = camera_number(cam)
+             job_id = str(uuid.uuid4())
+             sub_jobs.append({"job_id": job_id, "type": f"camera_{cam_num}"})
+             statistics_jobs[job_id] = {
+                 "status": "starting",
+                 "progress": 0,
+                 "start_time": time.time(),
+                 "camera": f"Cam{cam_num}",
+                 "parent_job_id": parent_job_id,
+             }
+
+             # Start thread
+             thread = threading.Thread(
+                 target=compute_statistics_for_camera,
+                 args=(
+                     base_dir,
+                     cam_num,
+                     False,  # use_merged
+                     image_count,
+                     type_name,
+                     endpoint,
+                     vector_format,
+                     job_id,
+                 ),
+             )
+             thread.daemon = True
+             thread.start()
+
+         # Store parent job
+         statistics_jobs[parent_job_id] = {
+             "status": "running",
+             "sub_jobs": sub_jobs,
+             "start_time": time.time(),
+         }
+
+         return jsonify({
+             "parent_job_id": parent_job_id,
+             "sub_jobs": sub_jobs,
+             "status": "starting",
+             "message": f"Statistics calculation started for {len(cameras)} camera(s)"
+             + (" and merged data" if include_merged else ""),
+         })
+
+     except Exception as e:
+         logger.error(f"Error starting statistics calculation: {e}", exc_info=True)
+         return jsonify({"error": str(e)}), 500
+
+
+ @statistics_bp.route("/statistics/status/<job_id>", methods=["GET"])
+ def get_statistics_status(job_id):
+     """Get statistics calculation job status."""
+     if job_id not in statistics_jobs:
+         return jsonify({"error": "Job not found"}), 404
+
+     job_data = statistics_jobs[job_id].copy()
+
+     # If this is a parent job, aggregate sub-job status
+     if "sub_jobs" in job_data:
+         sub_job_statuses = []
+         all_completed = True
+         any_failed = False
+         total_progress = 0
+
+         for sub_job in job_data["sub_jobs"]:
+             sub_id = sub_job["job_id"]
+             if sub_id in statistics_jobs:
+                 sub_status = statistics_jobs[sub_id].copy()
+                 sub_status["type"] = sub_job["type"]
+                 sub_job_statuses.append(sub_status)
+
+                 if sub_status["status"] != "completed":
+                     all_completed = False
+                 if sub_status["status"] == "failed":
+                     any_failed = True
+
+                 total_progress += sub_status.get("progress", 0)
+
+         job_data["sub_job_statuses"] = sub_job_statuses
+         job_data["overall_progress"] = total_progress / max(1, len(sub_job_statuses))
+
+         if any_failed:
+             job_data["status"] = "failed"
+         elif all_completed:
+             job_data["status"] = "completed"
+         else:
+             job_data["status"] = "running"
+
+     # Add timing info
+     if "start_time" in job_data:
+         elapsed = time.time() - job_data["start_time"]
+         job_data["elapsed_time"] = elapsed
+
+         if job_data["status"] == "running" and job_data.get("progress", 0) > 0:
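+             # Remaining time is a simple linear extrapolation: assume progress keeps
+             # accruing at the average rate observed so far.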
+             estimated_total = elapsed / (job_data["progress"] / 100)
+             job_data["estimated_remaining"] = estimated_total - elapsed
+
+     return jsonify(job_data)