pivtools 0.1.3__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pivtools-0.1.3.dist-info/METADATA +222 -0
- pivtools-0.1.3.dist-info/RECORD +127 -0
- pivtools-0.1.3.dist-info/WHEEL +5 -0
- pivtools-0.1.3.dist-info/entry_points.txt +3 -0
- pivtools-0.1.3.dist-info/top_level.txt +3 -0
- pivtools_cli/__init__.py +5 -0
- pivtools_cli/_build_marker.c +25 -0
- pivtools_cli/_build_marker.cp311-win_amd64.pyd +0 -0
- pivtools_cli/cli.py +225 -0
- pivtools_cli/example.py +139 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.c +334 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.h +22 -0
- pivtools_cli/lib/common.h +36 -0
- pivtools_cli/lib/interp2custom.c +146 -0
- pivtools_cli/lib/interp2custom.h +48 -0
- pivtools_cli/lib/peak_locate_gsl.c +711 -0
- pivtools_cli/lib/peak_locate_gsl.h +40 -0
- pivtools_cli/lib/peak_locate_gsl_print.c +736 -0
- pivtools_cli/lib/peak_locate_lm.c +751 -0
- pivtools_cli/lib/peak_locate_lm.h +27 -0
- pivtools_cli/lib/xcorr.c +342 -0
- pivtools_cli/lib/xcorr.h +31 -0
- pivtools_cli/lib/xcorr_cache.c +78 -0
- pivtools_cli/lib/xcorr_cache.h +26 -0
- pivtools_cli/piv/interp2custom/interp2custom.py +69 -0
- pivtools_cli/piv/piv.py +240 -0
- pivtools_cli/piv/piv_backend/base.py +825 -0
- pivtools_cli/piv/piv_backend/cpu_instantaneous.py +1005 -0
- pivtools_cli/piv/piv_backend/factory.py +28 -0
- pivtools_cli/piv/piv_backend/gpu_instantaneous.py +15 -0
- pivtools_cli/piv/piv_backend/infilling.py +445 -0
- pivtools_cli/piv/piv_backend/outlier_detection.py +306 -0
- pivtools_cli/piv/piv_backend/profile_cpu_instantaneous.py +230 -0
- pivtools_cli/piv/piv_result.py +40 -0
- pivtools_cli/piv/save_results.py +342 -0
- pivtools_cli/piv_cluster/cluster.py +108 -0
- pivtools_cli/preprocessing/filters.py +399 -0
- pivtools_cli/preprocessing/preprocess.py +79 -0
- pivtools_cli/tests/helpers.py +107 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration.py +167 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration_multi.py +553 -0
- pivtools_cli/tests/preprocessing/test_filters.py +41 -0
- pivtools_core/__init__.py +5 -0
- pivtools_core/config.py +703 -0
- pivtools_core/config.yaml +135 -0
- pivtools_core/image_handling/__init__.py +0 -0
- pivtools_core/image_handling/load_images.py +464 -0
- pivtools_core/image_handling/readers/__init__.py +53 -0
- pivtools_core/image_handling/readers/generic_readers.py +50 -0
- pivtools_core/image_handling/readers/lavision_reader.py +190 -0
- pivtools_core/image_handling/readers/registry.py +24 -0
- pivtools_core/paths.py +49 -0
- pivtools_core/vector_loading.py +248 -0
- pivtools_gui/__init__.py +3 -0
- pivtools_gui/app.py +687 -0
- pivtools_gui/calibration/__init__.py +0 -0
- pivtools_gui/calibration/app/__init__.py +0 -0
- pivtools_gui/calibration/app/views.py +1186 -0
- pivtools_gui/calibration/calibration_planar/planar_calibration_production.py +570 -0
- pivtools_gui/calibration/vector_calibration_production.py +544 -0
- pivtools_gui/config.py +703 -0
- pivtools_gui/image_handling/__init__.py +0 -0
- pivtools_gui/image_handling/load_images.py +464 -0
- pivtools_gui/image_handling/readers/__init__.py +53 -0
- pivtools_gui/image_handling/readers/generic_readers.py +50 -0
- pivtools_gui/image_handling/readers/lavision_reader.py +190 -0
- pivtools_gui/image_handling/readers/registry.py +24 -0
- pivtools_gui/masking/__init__.py +0 -0
- pivtools_gui/masking/app/__init__.py +0 -0
- pivtools_gui/masking/app/views.py +123 -0
- pivtools_gui/paths.py +49 -0
- pivtools_gui/piv_runner.py +261 -0
- pivtools_gui/pivtools.py +58 -0
- pivtools_gui/plotting/__init__.py +0 -0
- pivtools_gui/plotting/app/__init__.py +0 -0
- pivtools_gui/plotting/app/views.py +1671 -0
- pivtools_gui/plotting/plot_maker.py +220 -0
- pivtools_gui/post_processing/POD/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/views.py +647 -0
- pivtools_gui/post_processing/POD/pod_decompose.py +979 -0
- pivtools_gui/post_processing/POD/views.py +1096 -0
- pivtools_gui/post_processing/__init__.py +0 -0
- pivtools_gui/static/404.html +1 -0
- pivtools_gui/static/_next/static/chunks/117-d5793c8e79de5511.js +2 -0
- pivtools_gui/static/_next/static/chunks/484-cfa8b9348ce4f00e.js +1 -0
- pivtools_gui/static/_next/static/chunks/869-320a6b9bdafbb6d3.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/_not-found/page-12f067ceb7415e55.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/layout-b907d5f31ac82e9d.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/page-334cc4e8444cde2f.js +1 -0
- pivtools_gui/static/_next/static/chunks/fd9d1056-ad15f396ddf9b7e5.js +1 -0
- pivtools_gui/static/_next/static/chunks/framework-f66176bb897dc684.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-a1b3ced4d5f6d998.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-app-8a63c6f5e7baee11.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_app-72b849fbd24ac258.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_error-7ba65e1336b92748.js +1 -0
- pivtools_gui/static/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- pivtools_gui/static/_next/static/chunks/webpack-4a8ca7c99e9bb3d8.js +1 -0
- pivtools_gui/static/_next/static/css/7d3f2337d7ea12a5.css +3 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_buildManifest.js +1 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_ssgManifest.js +1 -0
- pivtools_gui/static/file.svg +1 -0
- pivtools_gui/static/globe.svg +1 -0
- pivtools_gui/static/grid.svg +8 -0
- pivtools_gui/static/index.html +1 -0
- pivtools_gui/static/index.txt +8 -0
- pivtools_gui/static/next.svg +1 -0
- pivtools_gui/static/vercel.svg +1 -0
- pivtools_gui/static/window.svg +1 -0
- pivtools_gui/stereo_reconstruction/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/views.py +1985 -0
- pivtools_gui/stereo_reconstruction/stereo_calibration_production.py +606 -0
- pivtools_gui/stereo_reconstruction/stereo_reconstruction_production.py +544 -0
- pivtools_gui/utils.py +63 -0
- pivtools_gui/vector_loading.py +248 -0
- pivtools_gui/vector_merging/__init__.py +1 -0
- pivtools_gui/vector_merging/app/__init__.py +1 -0
- pivtools_gui/vector_merging/app/views.py +759 -0
- pivtools_gui/vector_statistics/app/__init__.py +1 -0
- pivtools_gui/vector_statistics/app/views.py +710 -0
- pivtools_gui/vector_statistics/ensemble_statistics.py +49 -0
- pivtools_gui/vector_statistics/instantaneous_statistics.py +311 -0
- pivtools_gui/video_maker/__init__.py +0 -0
- pivtools_gui/video_maker/app/__init__.py +0 -0
- pivtools_gui/video_maker/app/views.py +436 -0
- pivtools_gui/video_maker/video_maker.py +662 -0
|
@@ -0,0 +1,662 @@
|
|
|
1
|
+
import glob
|
|
2
|
+
import re
|
|
3
|
+
import shutil
|
|
4
|
+
import subprocess
|
|
5
|
+
import sys
|
|
6
|
+
import time
|
|
7
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Callable, List, Optional, Tuple
|
|
11
|
+
import os
|
|
12
|
+
import cv2
|
|
13
|
+
import matplotlib.colors as mpl_colors
|
|
14
|
+
import matplotlib.pyplot as plt
|
|
15
|
+
import numpy as np
|
|
16
|
+
from loguru import logger
|
|
17
|
+
from scipy.io import loadmat
|
|
18
|
+
|
|
19
|
+
sys.path.insert(0, str(Path(__file__).parent.parent))
|
|
20
|
+
|
|
21
|
+
from ..vector_loading import read_mat_contents
|
|
22
|
+
|
|
23
|
+
# Constants for optimization
DEFAULT_BATCH_SIZE = 10  # Files to preload for processing
LIMIT_SAMPLE_SIZE = 50  # Files for limit computation
LUT_SIZE = 1024  # LUT resolution for color mapping
PERCENTILE_LOWER = 5  # lower percentile used when auto-computing colour limits
PERCENTILE_UPPER = 95  # upper percentile used when auto-computing colour limits
|
|
29
|
+
|
|
30
|
+
# ------------------------- Settings -------------------------
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
@dataclass
|
|
34
|
+
class PlotSettings:
|
|
35
|
+
corners: tuple | None = None # (x0, y0, x1, y1)
|
|
36
|
+
|
|
37
|
+
variableName: str = ""
|
|
38
|
+
variableUnits: str = ""
|
|
39
|
+
length_units: str = "mm"
|
|
40
|
+
title: str = ""
|
|
41
|
+
|
|
42
|
+
save_name: str | None = None
|
|
43
|
+
save_extension: str = ".png"
|
|
44
|
+
save_varle: bool = False
|
|
45
|
+
|
|
46
|
+
cmap: str | None = None
|
|
47
|
+
levels: int | list = 500
|
|
48
|
+
lower_limit: float | None = None
|
|
49
|
+
upper_limit: float | None = None
|
|
50
|
+
symmetric_around_zero: bool = True
|
|
51
|
+
|
|
52
|
+
_xlabel: str = "x"
|
|
53
|
+
_ylabel: str = "y"
|
|
54
|
+
_fontsize: int = 12
|
|
55
|
+
_title_fontsize: int = 14
|
|
56
|
+
|
|
57
|
+
# New: optional coordinates
|
|
58
|
+
coords_x: np.ndarray | None = None
|
|
59
|
+
coords_y: np.ndarray | None = None
|
|
60
|
+
|
|
61
|
+
# Video options
|
|
62
|
+
fps: int = 30
|
|
63
|
+
out_path: str = "field.mp4"
|
|
64
|
+
mask_rgb: Tuple[int, int, int] = (200, 200, 200) # RGB for masked pixels
|
|
65
|
+
|
|
66
|
+
# Quality knobs
|
|
67
|
+
use_ffmpeg: bool = True # only ffmpeg supported
|
|
68
|
+
crf: int = 18 # tuned for compatible H.264
|
|
69
|
+
codec: str = "libx264" # ensure H.264 by default
|
|
70
|
+
pix_fmt: str = "yuv420p" # ensure maximum compatibility (Windows players)
|
|
71
|
+
preset: str = "slow" # encoding speed/size tradeoff
|
|
72
|
+
dither: bool = False # Disabled by default to avoid graininess
|
|
73
|
+
dither_strength: float = 0.0001 # Much lower strength when enabled
|
|
74
|
+
upscale: Optional[float | Tuple[int, int]] = (
|
|
75
|
+
None # e.g. 2.0 or (H_out, W_out) or None (keep native)
|
|
76
|
+
)
|
|
77
|
+
|
|
78
|
+
# Extra ffmpeg args (appended to the ffmpeg command) - use this to tune quality further
|
|
79
|
+
ffmpeg_extra_args: Tuple[str, ...] | List[str] = ()
|
|
80
|
+
ffmpeg_loglevel: str = "warning"
|
|
81
|
+
|
|
82
|
+
# For progress updates
|
|
83
|
+
progress_callback: Optional[Callable[[int, int, str], None]] = None
|
|
84
|
+
|
|
85
|
+
# Test mode attributes
|
|
86
|
+
test_mode: bool = False
|
|
87
|
+
test_frames: Optional[int] = None
|
|
88
|
+
|
|
89
|
+
# Noise reduction options
|
|
90
|
+
apply_smoothing: bool = True # Enable light smoothing by default
|
|
91
|
+
smoothing_sigma: float = 0.8 # Gaussian smoothing strength
|
|
92
|
+
median_filter_size: int = 3 # Median filter to remove salt-and-pepper noise
|
|
93
|
+
|
|
94
|
+
@property
|
|
95
|
+
def xlabel(self):
|
|
96
|
+
if self.length_units:
|
|
97
|
+
return f"{self._xlabel} ({self.length_units})"
|
|
98
|
+
return self._xlabel
|
|
99
|
+
|
|
100
|
+
@property
|
|
101
|
+
def ylabel(self):
|
|
102
|
+
if self.length_units:
|
|
103
|
+
return f"{self._ylabel} ({self.length_units})"
|
|
104
|
+
return self._ylabel
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
# ------------------------- Helpers -------------------------

# Matches runs of digits; used by _natural_key to split file names into
# alternating text/number chunks so numeric parts sort numerically.
_num_re = re.compile(r"(\d+)")
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
def _resolve_upscale(
|
|
113
|
+
h: int, w: int, upscale: Optional[float | Tuple[int, int]]
|
|
114
|
+
) -> Tuple[int, int]:
|
|
115
|
+
"""Return (H_out, W_out). `upscale` can be None, a float factor, or (H, W)."""
|
|
116
|
+
if upscale is None or upscale == 1.0:
|
|
117
|
+
H = h
|
|
118
|
+
W = w
|
|
119
|
+
elif isinstance(upscale, (int, float)):
|
|
120
|
+
H = int(round(h * float(upscale)))
|
|
121
|
+
W = int(round(w * float(upscale)))
|
|
122
|
+
else: # assume (H, W) tuple
|
|
123
|
+
target_h, target_w = upscale
|
|
124
|
+
aspect_ratio = w / h
|
|
125
|
+
# Fit to the largest possible size that matches the aspect ratio
|
|
126
|
+
if target_w / target_h > aspect_ratio:
|
|
127
|
+
H = target_h
|
|
128
|
+
W = int(target_h * aspect_ratio)
|
|
129
|
+
else:
|
|
130
|
+
W = target_w
|
|
131
|
+
H = int(target_w / aspect_ratio)
|
|
132
|
+
# ensure even dims (important for yuv420p, many players/codecs)
|
|
133
|
+
if H % 2:
|
|
134
|
+
H += 1
|
|
135
|
+
if W % 2:
|
|
136
|
+
W += 1
|
|
137
|
+
return H, W
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def _natural_key(p: Path) -> List:
    """Natural-sort key: embedded integers compare numerically ("2" < "10")."""
    # re.split with a capturing group yields text at even indices and the
    # captured digit runs at odd indices; convert only the digit runs.
    chunks = _num_re.split(str(p))
    return [int(c) if i % 2 else c for i, c in enumerate(chunks)]
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def _select_variable_from_arrs(
    arrs: np.ndarray, filepath: str, var: str, run_index: int = 0
) -> Tuple[np.ndarray, Optional[np.ndarray]]:
    """Extract variable and mask from arrays or MAT file, selecting the specified run index for multi-run data.

    Resolution order (each stage falls through to the next on failure):
      1. `arrs` is a 4D ndarray (R, N, H, W): pick run `run_index`, then
         component `var` ("ux"=0, "uy"=1, "mag"=sqrt(ux^2+uy^2), or an index).
      2. `arrs` is a 3D ndarray (N, H, W): same component selection, run
         assumed already selected; invalid `var` defaults to index 0.
      3. Re-load `filepath` with scipy.io.loadmat and look `var` up by name.
      4. Treat `arrs` as dict-like and pull `var` (or compute "mag") directly.
    Raises ValueError if every stage fails. The second return value is the
    boolean-ish mask (component index 2 / "b_mask" key) or None.
    """

    # Debug: Check if var is actually a numpy array (which would be an error in calling code)
    if isinstance(var, np.ndarray):
        logger.error(f"ERROR: var parameter is a numpy array instead of string! var.shape={var.shape}, var.dtype={var.dtype}")
        logger.error(f"This suggests a bug in the calling code. Defaulting to 'ux'")
        var = "ux"  # Default to ux as a fallback
    elif not isinstance(var, (str, int)):
        logger.error(f"ERROR: var parameter has unexpected type {type(var)}: {var}")
        logger.error(f"Converting to string as fallback")
        var = str(var)

    # ndarray case (common path)
    if isinstance(arrs, np.ndarray):
        try:
            if arrs.ndim == 4:
                # Common layout: (R, N, H, W) with N>=3 (ux=0, uy=1, b_mask=2), R is runs
                # Validate run_index
                if not (0 <= run_index < arrs.shape[0]):
                    logger.warning(f"run_index {run_index} out of bounds for {filepath}, using 0")
                    run_index = 0
                var_idx = None
                if isinstance(var, str):
                    if var == "ux":
                        var_idx = 0
                    elif var == "uy":
                        var_idx = 1
                    elif var == "mag":  # Calculate magnitude for vector field
                        ux = arrs[run_index, 0]
                        uy = arrs[run_index, 1]
                        arr = np.sqrt(ux**2 + uy**2)
                        b_mask = arrs[run_index, 2] if arrs.shape[1] > 2 else None
                        return arr, (b_mask if b_mask is not None else None)
                    else:
                        # allow numeric string like "0"/"1"
                        try:
                            var_idx = int(var)
                        except Exception:
                            var_idx = None
                elif isinstance(var, int):
                    var_idx = var

                # Unresolvable var_idx falls through to the generic fallback below.
                if var_idx is not None and 0 <= var_idx < arrs.shape[1]:
                    arr = arrs[run_index, var_idx]
                    b_mask = arrs[run_index, 2] if arrs.shape[1] > 2 else None
                    if arr.ndim != 2:
                        raise ValueError(f"Expected 2D array for {var} in {filepath} (run_index {run_index}), but got {arr.ndim}D with shape {arr.shape}. The MAT file may contain 1D data for this run; try a different run (e.g., run=1).")
                    return arr, (b_mask if b_mask is not None else None)
            elif arrs.ndim == 3:
                # Layout: (N, H, W) with N>=3 (ux=0, uy=1, b_mask=2) - single run already selected
                var_idx = None
                if isinstance(var, str):
                    if var == "ux":
                        var_idx = 0
                    elif var == "uy":
                        var_idx = 1
                    elif var == "mag":  # Calculate magnitude for vector field
                        ux = arrs[0]
                        uy = arrs[1]
                        arr = np.sqrt(ux**2 + uy**2)
                        b_mask = arrs[2] if arrs.shape[0] > 2 else None
                        return arr, (b_mask if b_mask is not None else None)
                    else:
                        # allow numeric string like "0"/"1"
                        try:
                            var_idx = int(var)
                        except Exception:
                            var_idx = None
                elif isinstance(var, int):
                    var_idx = var

                if var_idx is not None and 0 <= var_idx < arrs.shape[0]:
                    arr = arrs[var_idx]
                    b_mask = arrs[2] if arrs.shape[0] > 2 else None
                    if arr.ndim != 2:
                        raise ValueError(f"Expected 2D array for {var} in {filepath} (3D case), but got {arr.ndim}D with shape {arr.shape}. The MAT file may contain 1D data.")
                    return arr, (b_mask if b_mask is not None else None)
                else:
                    # If var_idx is invalid, default to first component (ux) for 3D arrays
                    logger.warning(f"Invalid variable '{var}' for 3D array in {filepath}, defaulting to index 0 (ux)")
                    arr = arrs[0]
                    b_mask = arrs[2] if arrs.shape[0] > 2 else None
                    if arr.ndim != 2:
                        raise ValueError(f"Expected 2D array for default variable (index 0) in {filepath} (3D case), but got {arr.ndim}D with shape {arr.shape}.")
                    # logger.debug(f"Returning default arr from 3D: arr.shape={arr.shape}, b_mask.shape={getattr(b_mask, 'shape', 'N/A')}")
                    return arr, (b_mask if b_mask is not None else None)

            # fallback: flatten first item (for non-3D/4D or invalid var_idx)
            # logger.debug(f"Fallback: arrs[0].shape={arrs[0].shape}")
            arr = arrs[0]
            if arr.ndim != 2:
                raise ValueError(f"Expected 2D array for {var} in {filepath} (fallback), but got {arr.ndim}D with shape {arr.shape}. The MAT file may contain 1D data.")
            return arr, None
        except Exception as e:
            # Best-effort: log and fall through to the loadmat / dict stages.
            logger.error(f"Error in ndarray case for {filepath}: {e}")
            pass

    # dict-like or unknown: try loadmat to find a variable by name
    try:
        mat = loadmat(filepath, squeeze_me=True, struct_as_record=False)
        if var in mat:
            arr = np.asarray(mat[var])
            b_mask = None
            # First matching mask key wins.
            for key in ("b_mask", "bmask", "mask", "valid_mask"):
                if key in mat:
                    b_mask = np.asarray(mat[key])
                    break
            return arr, b_mask

        # Try to calculate magnitude if requested
        if var == "mag" and "ux" in mat and "uy" in mat:
            ux = np.asarray(mat["ux"])
            uy = np.asarray(mat["uy"])
            arr = np.sqrt(ux**2 + uy**2)
            b_mask = None
            for key in ("b_mask", "bmask", "mask", "valid_mask"):
                if key in mat:
                    b_mask = np.asarray(mat[key])
                    break
            return arr, b_mask
    except Exception as e:
        logger.error(f"Error loading MAT for {filepath}: {e}")
        pass

    # If arrs is dict-like, try to pull key directly
    try:
        if hasattr(arrs, "get") and not isinstance(arrs, np.ndarray):
            # Only proceed if it's actually dict-like and not a numpy array
            if var in arrs:
                arr = np.asarray(arrs[var])
                b_mask = arrs.get("b_mask", arrs.get("mask", None))

                return arr, (np.asarray(b_mask) if b_mask is not None else None)

            # Try to calculate magnitude if requested
            if var == "mag" and "ux" in arrs and "uy" in arrs:
                ux = np.asarray(arrs["ux"])
                uy = np.asarray(arrs["uy"])
                arr = np.sqrt(ux**2 + uy**2)
                b_mask = arrs.get("b_mask", arrs.get("mask", None))

                return arr, (np.asarray(b_mask) if b_mask is not None else None)
    except Exception as e:
        logger.error(f"Error in dict case for {filepath}: {e}")
        pass

    # give up with a clear error
    raise ValueError(f"Unable to extract variable '{var}' from {filepath}")
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
def _compute_global_limits_from_files(
    files: List[Path], var: str, settings: PlotSettings, run_index: int = 0
) -> Tuple[float, float, bool, float, float]:
    """Compute colour limits from a sample of files, in parallel.

    Returns (vmin, vmax, use_two_slope, actual_min, actual_max): vmin/vmax
    are the plotting limits (percentile-based unless overridden in
    `settings`), use_two_slope indicates a zero-centred symmetric range,
    and actual_min/actual_max are the raw data extremes of the sample.
    """
    # Fast path: both limits supplied by the caller -> no file reads needed.
    if settings.lower_limit is not None and settings.upper_limit is not None:
        vmin = float(settings.lower_limit)
        vmax = float(settings.upper_limit)
        use_two = settings.symmetric_around_zero and (vmin < 0 < vmax)
        return vmin, vmax, use_two, vmin, vmax

    # Sample at most LIMIT_SAMPLE_SIZE files to keep this cheap.
    files_to_check = (
        files[:LIMIT_SAMPLE_SIZE] if len(files) > LIMIT_SAMPLE_SIZE else files
    )
    value_chunks: List[np.ndarray] = []

    def process_file(f: Path) -> Optional[np.ndarray]:
        # Best-effort: unreadable/invalid files simply contribute no values.
        try:
            arrs = read_mat_contents(str(f), run_index=run_index)
            arr, b_mask = _select_variable_from_arrs(
                arrs, str(f), var, 0
            )  # Run already selected by read_mat_contents
            masked = np.ma.array(
                arr, mask=b_mask.astype(bool) if b_mask is not None else None
            )
            return masked.compressed() if masked.count() > 0 else None
        except Exception:
            return None

    # FIX: os.cpu_count() may return None, which made min() raise TypeError.
    with ThreadPoolExecutor(max_workers=min(os.cpu_count() or 1, 8)) as executor:
        futures = [executor.submit(process_file, f) for f in files_to_check]
        for future in as_completed(futures):
            result = future.result()
            if result is not None:
                value_chunks.append(result)

    if not value_chunks:
        # No usable data anywhere: fall back to a symmetric default range.
        actual_min = actual_max = 0.0
        vmin = -1.0
        vmax = 1.0
    else:
        # Concatenate arrays once rather than extending a Python list
        # element by element (the previous approach boxed every float).
        all_values = np.concatenate(value_chunks)
        actual_min = float(np.min(all_values))
        actual_max = float(np.max(all_values))
        vmin = (
            float(np.percentile(all_values, PERCENTILE_LOWER))
            if settings.lower_limit is None
            else float(settings.lower_limit)
        )
        vmax = (
            float(np.percentile(all_values, PERCENTILE_UPPER))
            if settings.upper_limit is None
            else float(settings.upper_limit)
        )

    use_two = False
    if settings.symmetric_around_zero and vmin < 0 < vmax:
        # Make limits symmetric around zero so diverging colormaps centre there.
        vabs = max(abs(vmin), abs(vmax))
        vmin, vmax = -vabs, vabs
        use_two = True

    return vmin, vmax, use_two, actual_min, actual_max
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
def _make_lut(
    cmap_name: Optional[str], use_two_slope: bool, vmin: float, vmax: float
) -> np.ndarray:
    """Build a (LUT_SIZE, 3) uint8 RGB lookup table for colour mapping.

    A high-resolution LUT reduces visible banding before codec quantization.
    With no explicit colormap: diverging data gets the full "bwr" map,
    one-sided data gets only the matching half of "bwr".
    """
    if cmap_name == "default":
        cmap_name = None

    if cmap_name is not None:
        cmap = plt.get_cmap(cmap_name)
    elif use_two_slope:
        # Data straddles zero: full blue-white-red diverging map.
        cmap = plt.get_cmap("bwr")
    else:
        # One-sided data: carve out the relevant half of "bwr".
        bwr = plt.get_cmap("bwr")
        if vmax <= 0:
            half = bwr(np.linspace(0.0, 0.5, 256))
            cmap = mpl_colors.LinearSegmentedColormap.from_list("bwr_lower", half)
        else:
            half = bwr(np.linspace(0.5, 1.0, 256))
            cmap = mpl_colors.LinearSegmentedColormap.from_list("bwr_upper", half)

    # Sample the colormap, drop alpha, scale to 8-bit RGB rows.
    samples = cmap(np.linspace(0, 1, LUT_SIZE))[:, :3]
    return (samples * 255).astype(np.uint8)
|
|
385
|
+
|
|
386
|
+
|
|
387
|
+
def _to_uint16_var(frame: np.ndarray, vmin: float, vmax: float) -> np.ndarray:
|
|
388
|
+
"""Vectorized index computation."""
|
|
389
|
+
norm = (frame - vmin) / (vmax - vmin)
|
|
390
|
+
return np.clip((norm * (LUT_SIZE - 1)).round(), 0, LUT_SIZE - 1).astype(np.uint16)
|
|
391
|
+
|
|
392
|
+
|
|
393
|
+
def _apply_noise_reduction(field: np.ndarray, settings: PlotSettings) -> np.ndarray:
    """Denoise a scalar field per the settings (median then Gaussian).

    Returns the field unchanged when smoothing is disabled; otherwise a
    float32 copy with the configured filters applied.
    """
    if not getattr(settings, "apply_smoothing", True):
        return field

    smoothed = field.astype(np.float32)

    # Median filter first: removes isolated salt-and-pepper outliers.
    ksize = getattr(settings, "median_filter_size", 3)
    if ksize > 1:
        smoothed = cv2.medianBlur(smoothed, ksize)

    # Then a light Gaussian blur for residual high-frequency noise.
    sigma = getattr(settings, "smoothing_sigma", 0.8)
    if sigma > 0:
        smoothed = cv2.GaussianBlur(smoothed, (0, 0), sigma)

    return smoothed
|
|
405
|
+
|
|
406
|
+
|
|
407
|
+
# ------------------------- Writers (FFmpeg + fallback OpenCV) -------------------------
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
class FFmpegVideoWriter:
    """Stream raw RGB frames to an ffmpeg subprocess that encodes a video.

    Frames are piped as rawvideo/rgb24 over stdin; ffmpeg converts and
    encodes them (H.264 + yuv420p by default for maximum player
    compatibility). Call `write()` once per frame, then `release()` to
    flush and finalise the output file.
    """

    def __init__(
        self,
        path,
        width,
        height,
        fps=30,
        crf=18,
        codec="libx264",
        pix_fmt="yuv420p",
        preset="slow",
        extra_args=None,
        loglevel="warning",
    ):
        if shutil.which("ffmpeg") is None:
            raise RuntimeError("ffmpeg not found on PATH")
        path = Path(path).resolve()
        cmd = [
            "ffmpeg",
            "-y",
            "-loglevel",
            loglevel,
            # Input: raw rgb24 frames of fixed size streamed on stdin.
            "-f",
            "rawvideo",
            "-vcodec",
            "rawvideo",
            "-pix_fmt",
            "rgb24",
            "-s",
            f"{width}x{height}",
            "-r",
            str(fps),
            "-i",
            "-",
            "-an",
            # Output: encoder settings.
            "-vcodec",
            codec,
            "-pix_fmt",
            pix_fmt,
            "-crf",
            str(crf),
            "-preset",
            preset,
            # Move the moov atom to the front so playback can start streaming.
            "-movflags",
            "+faststart",
        ]
        # append any user-supplied extra args
        if extra_args:
            cmd += list(extra_args)
        cmd.append(str(path))

        # Capture stderr so the caller can see ffmpeg warnings and tuning info when we close
        # Annotate proc for type-checkers
        self.proc: subprocess.Popen = subprocess.Popen(
            cmd,
            stdin=subprocess.PIPE,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.PIPE,
        )
        self.width, self.height = width, height
        self.path = str(path)

    def write(self, rgb_frame_uint8):
        """Write one (H, W, 3) uint8 RGB frame.

        Raises RuntimeError if ffmpeg has already exited (broken pipe).
        """
        # mypy/pylance treat proc.stdin as Optional; guard at runtime
        stdin = self.proc.stdin
        if stdin is None:
            raise RuntimeError("ffmpeg stdin is not available")
        try:
            stdin.write(rgb_frame_uint8.tobytes())
        except BrokenPipeError:
            _, stderr = self.proc.communicate()
            if stderr:
                msg = stderr.decode(errors="replace").strip()
                # CONSISTENCY FIX: report via loguru like the rest of this
                # module instead of bare print().
                logger.error(f"ffmpeg stderr: {msg}")
            raise RuntimeError("ffmpeg process has exited (broken pipe)")

    def release(self):
        """Close stdin, wait for ffmpeg to finish, and report its stderr."""
        stdin = self.proc.stdin
        # Only close if not already closed
        if stdin is not None and not stdin.closed:
            stdin.close()
        try:
            _, stderr = self.proc.communicate()
        except ValueError:
            # Pipes already consumed by an earlier communicate(); nothing left.
            stderr = None
        if stderr:
            try:
                msg = stderr.decode(errors="replace").strip()
            except Exception:
                msg = str(stderr)
            if msg:
                # CONSISTENCY FIX: logger instead of print (matches module style).
                logger.warning(f"ffmpeg stderr for {self.path}:\n{msg}")
|
|
504
|
+
|
|
505
|
+
|
|
506
|
+
# ------------------------- Core: high-quality renderer -------------------------
|
|
507
|
+
|
|
508
|
+
|
|
509
|
+
def make_video_from_scalar(
    folder: str | Path,
    var: str = "uy",
    pattern: str = "[0-9]*.mat",
    settings: Optional[PlotSettings] = None,
    cancel_event=None,
    run_index: int = 0,
) -> dict:
    """
    Optimized video generation with batching and vectorization.
    Validates inputs, handles errors gracefully, and optimizes memory usage.
    run_index: int, default 0 - specifies which run (0-based index) to extract from multi-run .mat files (e.g., 4D arrays with shape (R, N, H, W)).

    Pipeline: discover MAT files -> validate run_index -> compute global
    colour limits -> build LUT -> colour-map and stream each frame to an
    ffmpeg writer. Per-frame failures are logged and skipped so one bad
    file does not abort the whole video. Returns a metadata dict (output
    path, limits, frame count, shapes, encoder settings, elapsed time).
    """
    t0 = time.time()
    folder = Path(folder)
    if not folder.exists() or not folder.is_dir():
        raise ValueError(f"Invalid folder path: {folder}")

    # Natural sort so frame "2" precedes "10"; coordinate files are not frames.
    files = sorted(
        [Path(p) for p in glob.glob(str(folder / pattern))], key=_natural_key
    )
    files = [f for f in files if "coordinate" not in f.name.lower()]
    if not files:
        raise FileNotFoundError(f"No MAT files found in {folder} matching '{pattern}'")

    if settings is None:
        settings = PlotSettings()
    # Test mode: render only the first few frames for a quick preview.
    if hasattr(settings, "test_mode") and getattr(settings, "test_mode", False):
        test_frames = getattr(settings, "test_frames", 50)
        files = files[:test_frames]

    # Validate that the run_index exists in the files
    # Note: read_mat_contents will raise ValueError if run_index is invalid
    try:
        test_arrs = read_mat_contents(str(files[0]), run_index=run_index)
        # Check if the returned data contains any non-zero elements (i.e., is not empty)
        if isinstance(test_arrs, np.ndarray):
            if test_arrs.size == 0 or not np.any(test_arrs):
                raise ValueError(f"Run not found: run_index {run_index} contains empty/zero data in {files[0]}")
        else:
            raise ValueError(f"Run not found: unexpected data type returned for run_index {run_index}")
    except ValueError as e:
        # read_mat_contents already validates run_index and raises informative errors
        if "Invalid run_index" in str(e) or "No valid runs" in str(e) or "Run not found" in str(e):
            raise ValueError(f"Run not found: {e}")
        raise
    except Exception as e:
        logger.error(f"Failed to validate run_index {run_index} in {files[0]}: {e}")
        raise ValueError(f"Run not found: unable to load data with run_index {run_index}")

    # Compute limits in parallel
    try:
        vmin, vmax, use_two, actual_min, actual_max = _compute_global_limits_from_files(
            files, var, settings, run_index
        )
    except Exception as e:
        logger.error(f"Failed to compute limits: {e}")
        raise

    # One LUT reused for every frame; indices are computed per frame below.
    lut = _make_lut(settings.cmap, use_two, vmin, vmax)

    # Get dimensions from first file
    try:
        arrs0 = read_mat_contents(str(files[0]), run_index=run_index)
        arr0, _ = _select_variable_from_arrs(arrs0, str(files[0]), var, 0)  # Run already selected by read_mat_contents
        logger.debug(f"First file arr0.shape={arr0.shape}, arr0.ndim={arr0.ndim}")
        if arr0.ndim != 2:
            raise ValueError(f"Expected 2D array for {var} in {files[0]}, but got {arr0.ndim}D with shape {arr0.shape}")
        H, W = arr0.shape
        if H == 0 or W == 0:
            raise ValueError(f"Invalid dimensions {H}x{W} in {files[0]}")
    except Exception as e:
        logger.error(f"Failed to read first file {files[0]}: {e}")
        raise

    # Output size (possibly upscaled; always even, as yuv420p requires).
    Hout, Wout = _resolve_upscale(H, W, settings.upscale)

    try:
        writer = FFmpegVideoWriter(
            settings.out_path,
            Wout,
            Hout,
            fps=settings.fps,
            crf=settings.crf,
            codec=settings.codec,
            pix_fmt=settings.pix_fmt,
            preset=settings.preset,
            extra_args=settings.ffmpeg_extra_args,
            loglevel=settings.ffmpeg_loglevel,
        )
    except RuntimeError as e:
        logger.error(f"FFmpeg writer initialization failed: {e}")
        raise

    total_frames = len(files)
    # Frames are processed in small batches to bound memory use.
    for i in range(0, total_frames, DEFAULT_BATCH_SIZE):
        if cancel_event and cancel_event.is_set():
            logger.info("Video creation cancelled")
            break
        batch_files = files[i : i + DEFAULT_BATCH_SIZE]
        for j, f in enumerate(batch_files):
            try:
                arrs = read_mat_contents(str(f), run_index=run_index)
                field, b_mask = _select_variable_from_arrs(arrs, str(f), var, 0)  # Run already selected by read_mat_contents
                field = _apply_noise_reduction(field, settings)
                # Map values to LUT indices, then indices to RGB pixels.
                field_indices = _to_uint16_var(field, vmin, vmax)
                rgb = lut[field_indices]
                if Hout != H or Wout != W:
                    # Lanczos for the image; nearest-neighbour keeps the mask boolean.
                    rgb = cv2.resize(rgb, (Wout, Hout), interpolation=cv2.INTER_LANCZOS4)
                    b_mask = (
                        cv2.resize(
                            b_mask.astype(np.uint8),
                            (Wout, Hout),
                            interpolation=cv2.INTER_NEAREST,
                        ).astype(bool)
                        if b_mask is not None
                        else None
                    )
                if b_mask is not None:
                    # Paint masked pixels with the configured solid colour.
                    rgb[b_mask] = settings.mask_rgb
                writer.write(rgb)
                if settings.progress_callback:
                    # Invoked as callback(frames_done, total_frames).
                    settings.progress_callback(i + j + 1, total_frames)
            except Exception as e:
                logger.error(f"Error processing file {f}: {e}")
                continue  # Skip bad files but continue processing
        # Clear batch to free memory immediately
        del batch_files

    try:
        writer.release()
    except Exception as e:
        logger.error(f"Error releasing writer: {e}")

    t1 = time.time()
    return {
        "out_path": settings.out_path,
        "vmin": vmin,
        "vmax": vmax,
        "actual_min": actual_min,
        "actual_max": actual_max,
        "use_two_slope": use_two,
        "fps": settings.fps,
        "frames": len(files),
        "shape": (H, W),
        "shape_out": (Hout, Wout),
        "variable": var,
        "cmap": settings.cmap,
        "elapsed_sec": round(t1 - t0, 3),
        "writer": "ffmpeg",
        "pix_fmt": getattr(settings, "pix_fmt", None),
        "crf": getattr(settings, "crf", None),
        "codec": getattr(settings, "codec", None),
    }
|