pivtools 0.1.3__cp311-cp311-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pivtools-0.1.3.dist-info/METADATA +222 -0
- pivtools-0.1.3.dist-info/RECORD +127 -0
- pivtools-0.1.3.dist-info/WHEEL +5 -0
- pivtools-0.1.3.dist-info/entry_points.txt +3 -0
- pivtools-0.1.3.dist-info/top_level.txt +3 -0
- pivtools_cli/__init__.py +5 -0
- pivtools_cli/_build_marker.c +25 -0
- pivtools_cli/_build_marker.cp311-win_amd64.pyd +0 -0
- pivtools_cli/cli.py +225 -0
- pivtools_cli/example.py +139 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.c +334 -0
- pivtools_cli/lib/PIV_2d_cross_correlate.h +22 -0
- pivtools_cli/lib/common.h +36 -0
- pivtools_cli/lib/interp2custom.c +146 -0
- pivtools_cli/lib/interp2custom.h +48 -0
- pivtools_cli/lib/peak_locate_gsl.c +711 -0
- pivtools_cli/lib/peak_locate_gsl.h +40 -0
- pivtools_cli/lib/peak_locate_gsl_print.c +736 -0
- pivtools_cli/lib/peak_locate_lm.c +751 -0
- pivtools_cli/lib/peak_locate_lm.h +27 -0
- pivtools_cli/lib/xcorr.c +342 -0
- pivtools_cli/lib/xcorr.h +31 -0
- pivtools_cli/lib/xcorr_cache.c +78 -0
- pivtools_cli/lib/xcorr_cache.h +26 -0
- pivtools_cli/piv/interp2custom/interp2custom.py +69 -0
- pivtools_cli/piv/piv.py +240 -0
- pivtools_cli/piv/piv_backend/base.py +825 -0
- pivtools_cli/piv/piv_backend/cpu_instantaneous.py +1005 -0
- pivtools_cli/piv/piv_backend/factory.py +28 -0
- pivtools_cli/piv/piv_backend/gpu_instantaneous.py +15 -0
- pivtools_cli/piv/piv_backend/infilling.py +445 -0
- pivtools_cli/piv/piv_backend/outlier_detection.py +306 -0
- pivtools_cli/piv/piv_backend/profile_cpu_instantaneous.py +230 -0
- pivtools_cli/piv/piv_result.py +40 -0
- pivtools_cli/piv/save_results.py +342 -0
- pivtools_cli/piv_cluster/cluster.py +108 -0
- pivtools_cli/preprocessing/filters.py +399 -0
- pivtools_cli/preprocessing/preprocess.py +79 -0
- pivtools_cli/tests/helpers.py +107 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration.py +167 -0
- pivtools_cli/tests/instantaneous_piv/test_piv_integration_multi.py +553 -0
- pivtools_cli/tests/preprocessing/test_filters.py +41 -0
- pivtools_core/__init__.py +5 -0
- pivtools_core/config.py +703 -0
- pivtools_core/config.yaml +135 -0
- pivtools_core/image_handling/__init__.py +0 -0
- pivtools_core/image_handling/load_images.py +464 -0
- pivtools_core/image_handling/readers/__init__.py +53 -0
- pivtools_core/image_handling/readers/generic_readers.py +50 -0
- pivtools_core/image_handling/readers/lavision_reader.py +190 -0
- pivtools_core/image_handling/readers/registry.py +24 -0
- pivtools_core/paths.py +49 -0
- pivtools_core/vector_loading.py +248 -0
- pivtools_gui/__init__.py +3 -0
- pivtools_gui/app.py +687 -0
- pivtools_gui/calibration/__init__.py +0 -0
- pivtools_gui/calibration/app/__init__.py +0 -0
- pivtools_gui/calibration/app/views.py +1186 -0
- pivtools_gui/calibration/calibration_planar/planar_calibration_production.py +570 -0
- pivtools_gui/calibration/vector_calibration_production.py +544 -0
- pivtools_gui/config.py +703 -0
- pivtools_gui/image_handling/__init__.py +0 -0
- pivtools_gui/image_handling/load_images.py +464 -0
- pivtools_gui/image_handling/readers/__init__.py +53 -0
- pivtools_gui/image_handling/readers/generic_readers.py +50 -0
- pivtools_gui/image_handling/readers/lavision_reader.py +190 -0
- pivtools_gui/image_handling/readers/registry.py +24 -0
- pivtools_gui/masking/__init__.py +0 -0
- pivtools_gui/masking/app/__init__.py +0 -0
- pivtools_gui/masking/app/views.py +123 -0
- pivtools_gui/paths.py +49 -0
- pivtools_gui/piv_runner.py +261 -0
- pivtools_gui/pivtools.py +58 -0
- pivtools_gui/plotting/__init__.py +0 -0
- pivtools_gui/plotting/app/__init__.py +0 -0
- pivtools_gui/plotting/app/views.py +1671 -0
- pivtools_gui/plotting/plot_maker.py +220 -0
- pivtools_gui/post_processing/POD/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/__init__.py +0 -0
- pivtools_gui/post_processing/POD/app/views.py +647 -0
- pivtools_gui/post_processing/POD/pod_decompose.py +979 -0
- pivtools_gui/post_processing/POD/views.py +1096 -0
- pivtools_gui/post_processing/__init__.py +0 -0
- pivtools_gui/static/404.html +1 -0
- pivtools_gui/static/_next/static/chunks/117-d5793c8e79de5511.js +2 -0
- pivtools_gui/static/_next/static/chunks/484-cfa8b9348ce4f00e.js +1 -0
- pivtools_gui/static/_next/static/chunks/869-320a6b9bdafbb6d3.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/_not-found/page-12f067ceb7415e55.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/layout-b907d5f31ac82e9d.js +1 -0
- pivtools_gui/static/_next/static/chunks/app/page-334cc4e8444cde2f.js +1 -0
- pivtools_gui/static/_next/static/chunks/fd9d1056-ad15f396ddf9b7e5.js +1 -0
- pivtools_gui/static/_next/static/chunks/framework-f66176bb897dc684.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-a1b3ced4d5f6d998.js +1 -0
- pivtools_gui/static/_next/static/chunks/main-app-8a63c6f5e7baee11.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_app-72b849fbd24ac258.js +1 -0
- pivtools_gui/static/_next/static/chunks/pages/_error-7ba65e1336b92748.js +1 -0
- pivtools_gui/static/_next/static/chunks/polyfills-42372ed130431b0a.js +1 -0
- pivtools_gui/static/_next/static/chunks/webpack-4a8ca7c99e9bb3d8.js +1 -0
- pivtools_gui/static/_next/static/css/7d3f2337d7ea12a5.css +3 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_buildManifest.js +1 -0
- pivtools_gui/static/_next/static/vQeR20OUdSSKlK4vukC4q/_ssgManifest.js +1 -0
- pivtools_gui/static/file.svg +1 -0
- pivtools_gui/static/globe.svg +1 -0
- pivtools_gui/static/grid.svg +8 -0
- pivtools_gui/static/index.html +1 -0
- pivtools_gui/static/index.txt +8 -0
- pivtools_gui/static/next.svg +1 -0
- pivtools_gui/static/vercel.svg +1 -0
- pivtools_gui/static/window.svg +1 -0
- pivtools_gui/stereo_reconstruction/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/__init__.py +0 -0
- pivtools_gui/stereo_reconstruction/app/views.py +1985 -0
- pivtools_gui/stereo_reconstruction/stereo_calibration_production.py +606 -0
- pivtools_gui/stereo_reconstruction/stereo_reconstruction_production.py +544 -0
- pivtools_gui/utils.py +63 -0
- pivtools_gui/vector_loading.py +248 -0
- pivtools_gui/vector_merging/__init__.py +1 -0
- pivtools_gui/vector_merging/app/__init__.py +1 -0
- pivtools_gui/vector_merging/app/views.py +759 -0
- pivtools_gui/vector_statistics/app/__init__.py +1 -0
- pivtools_gui/vector_statistics/app/views.py +710 -0
- pivtools_gui/vector_statistics/ensemble_statistics.py +49 -0
- pivtools_gui/vector_statistics/instantaneous_statistics.py +311 -0
- pivtools_gui/video_maker/__init__.py +0 -0
- pivtools_gui/video_maker/app/__init__.py +0 -0
- pivtools_gui/video_maker/app/views.py +436 -0
- pivtools_gui/video_maker/video_maker.py +662 -0
pivtools_cli/piv/piv.py
ADDED
@@ -0,0 +1,240 @@
import logging
import sys
import numpy as np
from pathlib import Path
from typing import List, Optional
from dask import array as da
from dask.distributed import Client
from pivtools_core.config import Config

from pivtools_cli.piv.piv_backend.factory import make_correlator_backend
from pivtools_cli.piv.piv_result import PIVResult
from pivtools_cli.piv.save_results import save_piv_result_distributed


def _batch_policy(config: Config) -> int:
    if config.backend == "gpu":
        # ~ (batch_size * H * W * bytes_per_pixel * intermediates)
        # < GPU_RAM * safety
        return 8  # e.g. start with 8 and tune
    else:
        return 1


def _process_and_save_single_pair(
    image_pair: da.Array,
    frame_number: int,
    config: Config,
    scattered_masks,
    scattered_cache,
    output_path: Path,
    runs_to_save: Optional[List[int]],
    vector_format: str,
) -> str:
    """
    Combined PIV processing and saving for a single image pair.

    This function is designed to be called via client.map() for efficient
    batch submission of tasks. It combines PIV computation and saving into
    a single atomic operation to reduce task graph complexity.

    Parameters
    ----------
    image_pair : da.Array
        Dask array slice of shape (2, H, W) containing one image pair.
    frame_number : int
        Frame number (1-based) for output filename.
    config : Config
        Configuration object.
    scattered_masks : Future or None
        Scattered reference to vector masks.
    scattered_cache : Future
        Scattered reference to correlator cache.
    output_path : Path
        Directory where .mat file will be saved.
    runs_to_save : Optional[List[int]]
        List of pass indices (0-based) to save.
    vector_format : str
        Format string for output filenames.

    Returns
    -------
    str
        Path to the saved file.
    """
    # Process PIV
    piv_result = _piv_single_pass(image_pair, config, scattered_masks, scattered_cache)

    # Save immediately to avoid accumulating results in memory
    saved_path = save_piv_result_distributed(
        piv_result, output_path, frame_number, runs_to_save, vector_format
    )

    return saved_path


def perform_piv_and_save(
    images: da.Array,
    config: Config,
    client: Client,
    output_path: Path,
    start_frame: int = 1,
    runs_to_save: Optional[List[int]] = None,
    vector_masks: Optional[List[np.ndarray]] = None,
    batch_size: int = None,  # Deprecated, kept for compatibility
) -> List:
    """
    Perform PIV and save results in parallel using TRUE lazy loading.

    This is the OPTIMAL Dask pattern:
    1. Images are already delayed tasks (from load_images)
    2. We convert to delayed format and submit to workers
    3. Each worker receives ONE delayed task at a time
    4. Worker: load → process → save → free → next
    5. No memory accumulation, no manual batching

    Memory footprint per worker:
    - 1 image pair: ~80 MB
    - PIV processing: ~200 MB peak
    - Total: ~280 MB (constant, regardless of total images!)

    Scaling:
    - 4 workers × 280 MB = ~1.1 GB total
    - Can process 10,000 images with same memory!
    - Main process: ~10 MB (just task graph)

    Parameters
    ----------
    images : da.Array
        Dask array of shape (N, 2, H, W) containing image pairs.
        Should be created by load_images() - already delayed!
    config : Config
        Configuration object.
    client : Client
        Dask distributed client.
    output_path : Path
        Directory where .mat files will be saved.
    start_frame : int
        Starting frame number (1-based) for filenames.
    runs_to_save : Optional[List[int]]
        List of pass indices (0-based) to save. If None, save all passes.
    vector_masks : Optional[List[np.ndarray]]
        Pre-computed vector masks for each PIV pass.
    batch_size : int
        DEPRECATED. Kept for compatibility but ignored.
        Dask scheduler handles distribution automatically.

    Returns
    -------
    tuple
        (all_saved_paths, scattered_cache) where:
        - all_saved_paths: List of paths to saved files
        - scattered_cache: Scattered correlator cache (for coordinate saving)
    """
    # Pre-compute correlator cache once to avoid redundant caching on workers
    temp_correlator = make_correlator_backend(config)
    correlator_cache = temp_correlator.get_cache_data()

    # Broadcast cache to all workers once (efficient, happens once)
    scattered_cache = client.scatter(correlator_cache, broadcast=True)
    logging.info("Broadcast correlator cache to all workers")

    scattered_masks = None
    if vector_masks is not None:
        scattered_masks = client.scatter(vector_masks, broadcast=True)
        total_mask_size = sum(m.nbytes for m in vector_masks) / 1024
        logging.info(f"Broadcast vector masks to all workers ({total_mask_size:.1f} KB)")

    num_images = int(images.shape[0])

    # Convert Dask array to delayed objects (still lazy!)
    # This gives us individual delayed tasks, one per image
    delayed_blocks = images.to_delayed().ravel()

    # Prepare frame numbers
    frame_numbers = list(range(start_frame, start_frame + num_images))

    logging.info(f"Submitting {num_images} independent tasks to cluster")

    # Use client.map() to submit all tasks at once
    # Dask scheduler will distribute to workers efficiently
    # Each worker will process tasks one-by-one as they become available
    futures = client.map(
        _process_and_save_single_pair,
        delayed_blocks,  # List of delayed tasks
        frame_numbers,
        config=config,
        scattered_masks=scattered_masks,
        scattered_cache=scattered_cache,
        output_path=output_path,
        runs_to_save=runs_to_save,
        vector_format=config.vector_format,
    )

    logging.info(
        f"Tasks submitted. Dask scheduler managing distribution across workers. "
        f"Each worker processes ONE image at a time."
    )

    # Gather results (this will block until all complete)
    # Workers process in parallel but each holds only 1 image at a time
    try:
        all_saved_paths = client.gather(futures)
        logging.info(f"All {num_images} images processed successfully")
    except Exception as e:
        logging.error(f"PIV processing failed: {e}")
        raise

    return all_saved_paths, scattered_cache


# def perform_piv(images: da.Array, config: Config, client: Client) -> List:
#     """
#     Perform PIV on a batch of image pairs.

#     Parameters
#     ----------
#     images : da.Array
#         Dask array of shape (N, 2, H, W) containing image pairs.
#     config : Config
#         Configuration object.
#     client : Client
#         Dask distributed client.

#     Returns
#     -------
#     List
#         List of Future objects that will resolve to PIVResult objects.
#         Use client.gather() to collect results or simply iterate and
#         call .result() on each future.
#     """
#     # Submit tasks to the cluster and return futures
#     futures = []
#     for i in range(images.shape[0]):
#         block = images[i]  # Get each block
#         future = client.submit(_piv_single_pass, block, config)
#         futures.append(future)
#     return futures


def _piv_single_pass(
    image_block: da.Array,
    config: Config,
    vector_masks: Optional[List[np.ndarray]] = None,
    correlator_cache: Optional[dict] = None,
) -> PIVResult:
    try:
        image_block = image_block.compute()
        if image_block.ndim == 3:
            # Shape: (2, H, W)
            image_block = image_block[np.newaxis, ...]  # Shape: (1, 2, H, W)
        correlator = make_correlator_backend(config, precomputed_cache=correlator_cache)
        piv_results = correlator.correlate_batch(image_block, config=config, vector_masks=vector_masks)
    except Exception as e:
        # Return a PIVResult containing error information
        error_result = PIVResult()
        # We could add error information to the result if needed
        logging.error(f"PIV processing failed: {str(e)}")
        # For now, return an empty result to maintain consistent typing
        return error_result
    return piv_results
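For context, the docstring above describes the intended driver pattern (scatter the correlator cache, then fan out one image pair per task via client.map). A minimal sketch of such a driver follows; it is not part of the wheel. The default Config() construction and the synthetic image array standing in for load_images() are assumptions made for illustration only.

# Hypothetical driver sketch for perform_piv_and_save (assumptions noted above).
import numpy as np
import dask.array as da
from pathlib import Path
from dask.distributed import Client

from pivtools_core.config import Config
from pivtools_cli.piv.piv import perform_piv_and_save

if __name__ == "__main__":
    client = Client(n_workers=4, threads_per_worker=1)  # local Dask cluster

    config = Config()  # assumed default construction; normally populated from config.yaml

    # Synthetic stand-in for load_images(): shape (N, 2, H, W), one chunk per image pair
    frames = np.random.rand(4, 2, 256, 256).astype(np.float32)
    images = da.from_array(frames, chunks=(1, 2, 256, 256))

    out_dir = Path("results")
    out_dir.mkdir(exist_ok=True)

    saved_paths, scattered_cache = perform_piv_and_save(
        images=images,
        config=config,
        client=client,
        output_path=out_dir,
        start_frame=1,
    )
    print(f"Saved {len(saved_paths)} vector files")
    client.close()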