pivtools 0.1.3__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pivtools-0.1.3.dist-info/METADATA +222 -0
- pivtools-0.1.3.dist-info/RECORD +127 -0
- pivtools-0.1.3.dist-info/WHEEL +5 -0
- pivtools-0.1.3.dist-info/entry_points.txt +3 -0
- pivtools-0.1.3.dist-info/top_level.txt +3 -0
- pivtools_cli/__init__.py +5 -0
- pivtools_cli/_build_marker.c +25 -0
- pivtools_cli/_build_marker.cp311-win_amd64.pyd +0 -0
- pivtools_cli/cli.py +225 -0
- pivtools_cli/example.py +139 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.c +334 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.h +22 -0
- pivtools_cli/lib/common.h +36 -0
- pivtools_cli/lib/interp2custom.c +146 -0
- pivtools_cli/lib/interp2custom.h +48 -0
- pivtools_cli/lib/peak_locate_gsl.c +711 -0
- pivtools_cli/lib/peak_locate_gsl.h +40 -0
- pivtools_cli/lib/peak_locate_gsl_print.c +736 -0
- pivtools_cli/lib/peak_locate_lm.c +751 -0
- pivtools_cli/lib/peak_locate_lm.h +27 -0
- pivtools_cli/lib/xcorr.c +342 -0
- pivtools_cli/lib/xcorr.h +31 -0
- pivtools_cli/lib/xcorr_cache.c +78 -0
- pivtools_cli/lib/xcorr_cache.h +26 -0
- pivtools_cli/piv/interp2custom/interp2custom.py +69 -0
- pivtools_cli/piv/piv.py +240 -0
- pivtools_cli/piv/piv_backend/base.py +825 -0
- pivtools_cli/piv/piv_backend/cpu_instantaneous.py +1005 -0
- pivtools_cli/piv/piv_backend/factory.py +28 -0
- pivtools_cli/piv/piv_backend/gpu_instantaneous.py +15 -0
- pivtools_cli/piv/piv_backend/infilling.py +445 -0
- pivtools_cli/piv/piv_backend/outlier_detection.py +306 -0
- pivtools_cli/piv/piv_backend/profile_cpu_instantaneous.py +230 -0
- pivtools_cli/piv/piv_result.py +40 -0
- pivtools_cli/piv/save_results.py +342 -0
- pivtools_cli/piv_cluster/cluster.py +108 -0
- pivtools_cli/preprocessing/filters.py +399 -0
- pivtools_cli/preprocessing/preprocess.py +79 -0
- pivtools_cli/tests/helpers.py +107 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration.py +167 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration_multi.py +553 -0
- pivtools_cli/tests/preprocessing/test_filters.py +41 -0
- pivtools_core/__init__.py +5 -0
- pivtools_core/config.py +703 -0
- pivtools_core/config.yaml +135 -0
- pivtools_core/image_handling/__init__.py +0 -0
- pivtools_core/image_handling/load_images.py +464 -0
- pivtools_core/image_handling/readers/__init__.py +53 -0
- pivtools_core/image_handling/readers/generic_readers.py +50 -0
- pivtools_core/image_handling/readers/lavision_reader.py +190 -0
- pivtools_core/image_handling/readers/registry.py +24 -0
- pivtools_core/paths.py +49 -0
- pivtools_core/vector_loading.py +248 -0
- pivtools_gui/__init__.py +3 -0
- pivtools_gui/app.py +687 -0
- pivtools_gui/calibration/__init__.py +0 -0
- pivtools_gui/calibration/app/__init__.py +0 -0
- pivtools_gui/calibration/app/views.py +1186 -0
- pivtools_gui/calibration/calibration_planar/planar_calibration_production.py +570 -0
- pivtools_gui/calibration/vector_calibration_production.py +544 -0
- pivtools_gui/config.py +703 -0
- pivtools_gui/image_handling/__init__.py +0 -0
- pivtools_gui/image_handling/load_images.py +464 -0
- pivtools_gui/image_handling/readers/__init__.py +53 -0
- pivtools_gui/image_handling/readers/generic_readers.py +50 -0
- pivtools_gui/image_handling/readers/lavision_reader.py +190 -0
- pivtools_gui/image_handling/readers/registry.py +24 -0
- pivtools_gui/masking/__init__.py +0 -0
- pivtools_gui/masking/app/__init__.py +0 -0
- pivtools_gui/masking/app/views.py +123 -0
- pivtools_gui/paths.py +49 -0
- pivtools_gui/piv_runner.py +261 -0
- pivtools_gui/pivtools.py +58 -0
- pivtools_gui/plotting/__init__.py +0 -0
- pivtools_gui/plotting/app/__init__.py +0 -0
- pivtools_gui/plotting/app/views.py +1671 -0
- pivtools_gui/plotting/plot_maker.py +220 -0
- pivtools_gui/post_processing/POD/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/views.py +647 -0
- pivtools_gui/post_processing/POD/pod_decompose.py +979 -0
- pivtools_gui/post_processing/POD/views.py +1096 -0
- pivtools_gui/post_processing/__init__.py +0 -0
- pivtools_gui/static/404.html +1 -0
- pivtools_gui/static/_next/static/chunks/117-d5793c8e79de5511.js +2 -0
- pivtools_gui/static/_next/static/chunks/484-cfa8b9348ce4f00e.js +1 -0
- pivtools_gui/static/_next/static/chunks/869-320a6b9bdafbb6d3.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/_not-found/page-12f067ceb7415e55.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/layout-b907d5f31ac82e9d.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/page-334cc4e8444cde2f.js +1 -0
- pivtools_gui/static/_next/static/chunks/fd9d1056-ad15f396ddf9b7e5.js +1 -0
- pivtools_gui/static/_next/static/chunks/framework-f66176bb897dc684.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-a1b3ced4d5f6d998.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-app-8a63c6f5e7baee11.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_app-72b849fbd24ac258.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_error-7ba65e1336b92748.js +1 -0
- pivtools_gui/static/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- pivtools_gui/static/_next/static/chunks/webpack-4a8ca7c99e9bb3d8.js +1 -0
- pivtools_gui/static/_next/static/css/7d3f2337d7ea12a5.css +3 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_buildManifest.js +1 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_ssgManifest.js +1 -0
- pivtools_gui/static/file.svg +1 -0
- pivtools_gui/static/globe.svg +1 -0
- pivtools_gui/static/grid.svg +8 -0
- pivtools_gui/static/index.html +1 -0
- pivtools_gui/static/index.txt +8 -0
- pivtools_gui/static/next.svg +1 -0
- pivtools_gui/static/vercel.svg +1 -0
- pivtools_gui/static/window.svg +1 -0
- pivtools_gui/stereo_reconstruction/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/views.py +1985 -0
- pivtools_gui/stereo_reconstruction/stereo_calibration_production.py +606 -0
- pivtools_gui/stereo_reconstruction/stereo_reconstruction_production.py +544 -0
- pivtools_gui/utils.py +63 -0
- pivtools_gui/vector_loading.py +248 -0
- pivtools_gui/vector_merging/__init__.py +1 -0
- pivtools_gui/vector_merging/app/__init__.py +1 -0
- pivtools_gui/vector_merging/app/views.py +759 -0
- pivtools_gui/vector_statistics/app/__init__.py +1 -0
- pivtools_gui/vector_statistics/app/views.py +710 -0
- pivtools_gui/vector_statistics/ensemble_statistics.py +49 -0
- pivtools_gui/vector_statistics/instantaneous_statistics.py +311 -0
- pivtools_gui/video_maker/__init__.py +0 -0
- pivtools_gui/video_maker/app/__init__.py +0 -0
- pivtools_gui/video_maker/app/views.py +436 -0
- pivtools_gui/video_maker/video_maker.py +662 -0
|
@@ -0,0 +1,248 @@
|
|
|
1
|
+
import warnings
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Optional, Sequence, Tuple
|
|
4
|
+
|
|
5
|
+
import dask
|
|
6
|
+
import dask.array as da
|
|
7
|
+
import numpy as np
|
|
8
|
+
import scipy.io
|
|
9
|
+
|
|
10
|
+
from pivtools_core.config import Config
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def read_mat_contents(
    file_path: str, run_index: Optional[int] = None, return_all_runs: bool = False
) -> np.ndarray:
    """
    Read ``piv_result`` from a .mat file.

    Parameters
    ----------
    file_path : str
        Path to a .mat file containing a ``piv_result`` struct or struct array.
    run_index : Optional[int]
        0-based run to select when the file holds multiple runs. If None, the
        first run with non-empty ``ux``/``uy`` data is selected.
    return_all_runs : bool
        If True, return every run stacked as (R, 3, H, W); otherwise return
        only the selected run as (1, 3, H, W).

    Returns
    -------
    np.ndarray
        Shape (R, 3, H, W) when ``return_all_runs`` is True, else (1, 3, H, W).
        The three channels are ux, uy, b_mask (mask cast to the velocity dtype).

    Raises
    ------
    ValueError
        If no valid run exists, or ``run_index`` is out of range.
    """
    mat = scipy.io.loadmat(file_path, struct_as_record=False, squeeze_me=True)
    piv_result = mat["piv_result"]

    # Multiple-runs case: loadmat yields an object ndarray of mat_struct entries.
    if isinstance(piv_result, np.ndarray) and piv_result.dtype == object:
        total_runs = piv_result.size

        if return_all_runs:
            all_runs = []
            for idx in range(total_runs):
                pr = piv_result[idx]
                ux = np.asarray(pr.ux)
                uy = np.asarray(pr.uy)
                b_mask = (
                    np.asarray(pr.b_mask).astype(ux.dtype, copy=False)
                    if ux.size > 0
                    else np.array([])
                )
                if ux.size > 0 and uy.size > 0:
                    stacked = np.stack([ux, uy, b_mask], axis=0)  # (3, H, W)
                else:
                    # Placeholder for an empty run. NOTE(review): if valid and
                    # empty runs are mixed, np.array(all_runs) below receives
                    # ragged shapes and may raise on modern NumPy — confirm
                    # whether empty runs ever coexist with valid ones.
                    stacked = np.array([[], [], []])
                all_runs.append(stacked)
            return np.array(all_runs)  # (R, 3, H, W)

        # Single-run selection from a multi-run file.
        if run_index is None:
            # Auto-select the first run whose velocity fields are non-empty.
            for idx in range(total_runs):
                pr = piv_result[idx]
                if np.asarray(pr.ux).size > 0 and np.asarray(pr.uy).size > 0:
                    run_index = idx
                    break
            else:
                raise ValueError(f"No valid runs found in {file_path}")
        if run_index < 0 or run_index >= total_runs:
            raise ValueError(
                f"Invalid run_index {run_index} for {file_path} (total_runs={total_runs})"
            )
        pr = piv_result[run_index]
        ux = np.asarray(pr.ux)
        uy = np.asarray(pr.uy)
        b_mask = np.asarray(pr.b_mask).astype(ux.dtype, copy=False)
        return np.stack([ux, uy, b_mask], axis=0)[None, ...]  # (1, 3, H, W)

    # Single-run struct: only run_index 0 (or None) is meaningful.
    if run_index is not None and run_index != 0:
        raise ValueError(
            f"Invalid run_index {run_index} for single-run file {file_path}"
        )
    pr = piv_result
    ux = np.asarray(pr.ux)
    uy = np.asarray(pr.uy)
    b_mask = np.asarray(pr.b_mask).astype(ux.dtype, copy=False)
    # Both values of return_all_runs produce the same (1, 3, H, W) result here;
    # the original duplicated if/else collapses to one return.
    return np.stack([ux, uy, b_mask], axis=0)[None, ...]
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def load_vectors_from_directory(
    data_dir: Path, config: Config, runs: Optional[Sequence[int]] = None
) -> da.Array:
    """
    Lazily load .mat vector files from ``data_dir`` as a Dask array.

    Parameters
    ----------
    data_dir : Path
        Directory containing vector files named via ``config.vector_format``.
    config : Config
        Supplies ``vector_format`` (e.g. "B%05d.mat"), ``num_images`` and
        ``piv_chunk_size``.
    runs : Optional[Sequence[int]]
        1-based run numbers. NOTE: read_mat_contents is invoked with a single
        run_index, so only the FIRST requested run is loaded per file and the
        run axis of the result always has size 1; a warning is emitted when
        more than one run is requested.

    Returns
    -------
    da.Array
        Shape (N_existing, 1, 3, H, W), rechunked along axis 0 by
        ``config.piv_chunk_size``.

    Raises
    ------
    FileNotFoundError
        If no expected vector file exists, or none can be probed successfully.
    """
    data_dir = Path(data_dir)
    fmt = config.vector_format  # e.g. "B%05d.mat"
    expected_paths = [data_dir / (fmt % i) for i in range(1, config.num_images + 1)]
    existing_paths = [p for p in expected_paths if p.exists()]

    missing_count = len(expected_paths) - len(existing_paths)
    if missing_count == len(expected_paths):
        raise FileNotFoundError(
            f"No vector files found using pattern {fmt} in {data_dir}"
        )
    if missing_count:
        warnings.warn(
            f"{missing_count} vector files missing in {data_dir} (loaded {len(existing_paths)})"
        )

    # Convert 1-based run numbers to 0-based indices for read_mat_contents.
    zero_based_runs: Optional[Sequence[int]] = None
    if runs:
        zero_based_runs = [r - 1 for r in runs]
        if len(zero_based_runs) > 1:
            # Defect guard: only the first requested run is actually read
            # below, so make the silent truncation visible to the caller.
            warnings.warn(
                f"Multiple runs requested ({list(runs)}); only run {runs[0]} will be loaded"
            )

    # Single run index used for every file (None means auto-select first valid).
    selected_run = zero_based_runs[0] if zero_based_runs else None

    # Probe the first readable file for shape/dtype so the delayed reads can be
    # wrapped in da.from_delayed without eagerly loading every file.
    first_arr = None
    for p in existing_paths:
        try:
            first_arr = read_mat_contents(str(p), run_index=selected_run)
            if first_arr.ndim != 4:
                warnings.warn(
                    f"[DEBUG] Unexpected array ndim={first_arr.ndim} in {p.name}"
                )
            break
        except Exception as e:
            # Probing failures are fatal: warn with context, then re-raise.
            warnings.warn(f"Failed to read {p.name} during probing: {e}")
            raise
    if first_arr is None:
        raise FileNotFoundError(f"Could not read any valid vector files in {data_dir}")

    shape, dtype = first_arr.shape, first_arr.dtype  # (R, 3, H, W), dtype

    delayed_items = [
        dask.delayed(read_mat_contents)(str(p), run_index=selected_run)
        for p in existing_paths
    ]
    arrays = [da.from_delayed(di, shape=shape, dtype=dtype) for di in delayed_items]
    stacked = da.stack(arrays, axis=0)  # (N, R, 3, H, W)
    return stacked.rechunk({0: config.piv_chunk_size})
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def load_coords_from_directory(
    data_dir: Path, runs: Optional[Sequence[int]] = None
) -> Tuple[Sequence[np.ndarray], Sequence[np.ndarray]]:
    """
    Read coordinates.mat from ``data_dir`` and return per-run (x_list, y_list).

    ``runs`` holds 1-based run numbers; when None or empty, every run present
    in the file is returned. Out-of-range run numbers are skipped with a
    warning. The returned lists follow the order of ``runs`` (or file order
    when ``runs`` is not given).

    Raises
    ------
    FileNotFoundError
        If coordinates.mat does not exist in ``data_dir``.
    KeyError
        If the file lacks a 'coordinates' variable.
    """
    directory = Path(data_dir)
    coords_file = directory / "coordinates.mat"
    if not coords_file.exists():
        raise FileNotFoundError(f"No coordinates.mat file found in {directory}")

    contents = scipy.io.loadmat(coords_file, struct_as_record=False, squeeze_me=True)
    if "coordinates" not in contents:
        raise KeyError(f"'coordinates' variable not found in {coords_file.name}")
    coordinates = contents["coordinates"]

    def _extract(struct):
        # Each struct exposes .x and .y coordinate arrays.
        return np.asarray(struct.x), np.asarray(struct.y)

    xs, ys = [], []

    # Multi-run files load as an object ndarray of structs.
    if isinstance(coordinates, np.ndarray) and coordinates.dtype == object:
        if runs:
            indices = [r - 1 for r in runs if 1 <= r <= coordinates.size]
            if len(indices) != len(runs):
                missing = sorted(set(runs) - set([i + 1 for i in indices]))
                warnings.warn(
                    f"Skipping out-of-range run indices {missing} for coordinates"
                )
        else:
            indices = list(range(coordinates.size))

        for i in indices:
            x_arr, y_arr = _extract(coordinates[i])
            xs.append(x_arr)
            ys.append(y_arr)
        return xs, ys

    # Single-run struct: only run 1 exists in the file.
    if runs and 1 not in runs:
        warnings.warn(
            "Requested runs do not include run 1 present in coordinates; returning empty coords"
        )
        return [], []
    x_arr, y_arr = _extract(coordinates)
    xs.append(x_arr)
    ys.append(y_arr)
    return xs, ys
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def save_mask_to_mat(file_path: str, mask: np.ndarray, polygons):
    """
    Write ``mask`` and ``polygons`` to a compressed .mat file.

    The variables are stored under the names 'mask' and 'polygons'.
    """
    payload = {"mask": mask, "polygons": polygons}
    scipy.io.savemat(file_path, payload, do_compression=True)
|
|
209
|
+
|
|
210
|
+
|
|
211
|
+
def read_mask_from_mat(file_path: str):
    """
    Read the mask and polygons from a .mat file.

    Parameters
    ----------
    file_path : str
        Path to a .mat file containing 'mask' and 'polygons' variables
        (as written by save_mask_to_mat).

    Returns
    -------
    mask : np.ndarray
        The mask array, squeezed of singleton dimensions.
    polygons : list[dict]
        One dict per polygon with keys 'index' (int), 'name' (str) and
        'points' (list).

    Raises
    ------
    ValueError
        If either 'mask' or 'polygons' is missing from the file.
    """
    # Load without squeeze_me to avoid 0-d array issues with single-element cells.
    # struct_as_record=True (the default) makes structs record arrays whose
    # fields are accessed by name-indexing below.
    mat = scipy.io.loadmat(file_path, squeeze_me=False, struct_as_record=True)
    mask = mat.get("mask", None)
    polygons_raw = mat.get("polygons", None)
    if mask is None or polygons_raw is None:
        raise ValueError(f"Missing 'mask' or 'polygons' in {file_path}")

    # Squeeze the mask manually since loadmat was called without squeeze_me.
    mask = np.squeeze(mask)

    # polygons_raw is a numpy object array (MATLAB cell array).
    # Flatten it to iterate (it might be [[obj1], [obj2]] or [[obj]]).
    polygons_flat = polygons_raw.flatten()

    polygons = []
    for poly in polygons_flat:
        # Each entry is either a structured scalar (np.void) — fields directly
        # indexable — or a 1x1 record array, in which case the value sits at
        # [0, 0]. Handle both representations for each field.
        idx_raw = poly['index'] if isinstance(poly, np.void) else poly['index'][0, 0]
        name_raw = poly['name'] if isinstance(poly, np.void) else poly['name'][0, 0]
        pts_raw = poly['points'] if isinstance(poly, np.void) else poly['points'][0, 0]

        # .item() unwraps 0-d numpy arrays; plain Python scalars pass through.
        idx = int(idx_raw.item() if hasattr(idx_raw, 'item') else idx_raw)
        name = str(name_raw.item() if hasattr(name_raw, 'item') else name_raw)

        # pts might be a 2D array, convert to list of lists
        points = pts_raw.tolist() if isinstance(pts_raw, np.ndarray) else list(pts_raw)
        polygons.append({"index": idx, "name": name, "points": points})

    return mask, polygons
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Vector merging module
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# App module for vector merging
|