pyvale 2025.5.3__cp311-cp311-macosx_14_0_arm64.whl → 2025.7.0__cp311-cp311-macosx_14_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyvale might be problematic. Click here for more details.
- pyvale/.dylibs/libomp.dylib +0 -0
- pyvale/.dylibs/libunwind.1.0.dylib +0 -0
- pyvale/__init__.py +12 -0
- pyvale/blendercalibrationdata.py +3 -1
- pyvale/blenderscene.py +7 -5
- pyvale/blendertools.py +27 -5
- pyvale/camera.py +1 -0
- pyvale/cameradata.py +3 -0
- pyvale/camerasensor.py +147 -0
- pyvale/camerastereo.py +4 -4
- pyvale/cameratools.py +23 -61
- pyvale/cython/rastercyth.c +1657 -1352
- pyvale/cython/rastercyth.cpython-311-darwin.so +0 -0
- pyvale/cython/rastercyth.py +71 -26
- pyvale/data/plate_hole_def0000.tiff +0 -0
- pyvale/data/plate_hole_def0001.tiff +0 -0
- pyvale/data/plate_hole_ref0000.tiff +0 -0
- pyvale/data/plate_rigid_def0000.tiff +0 -0
- pyvale/data/plate_rigid_def0001.tiff +0 -0
- pyvale/data/plate_rigid_ref0000.tiff +0 -0
- pyvale/dataset.py +96 -6
- pyvale/dic/cpp/dicbruteforce.cpp +370 -0
- pyvale/dic/cpp/dicfourier.cpp +648 -0
- pyvale/dic/cpp/dicinterpolator.cpp +559 -0
- pyvale/dic/cpp/dicmain.cpp +215 -0
- pyvale/dic/cpp/dicoptimizer.cpp +675 -0
- pyvale/dic/cpp/dicrg.cpp +137 -0
- pyvale/dic/cpp/dicscanmethod.cpp +677 -0
- pyvale/dic/cpp/dicsmooth.cpp +138 -0
- pyvale/dic/cpp/dicstrain.cpp +383 -0
- pyvale/dic/cpp/dicutil.cpp +563 -0
- pyvale/dic2d.py +164 -0
- pyvale/dic2dcpp.cpython-311-darwin.so +0 -0
- pyvale/dicchecks.py +476 -0
- pyvale/dicdataimport.py +247 -0
- pyvale/dicregionofinterest.py +887 -0
- pyvale/dicresults.py +55 -0
- pyvale/dicspecklegenerator.py +238 -0
- pyvale/dicspecklequality.py +305 -0
- pyvale/dicstrain.py +387 -0
- pyvale/dicstrainresults.py +37 -0
- pyvale/errorintegrator.py +10 -8
- pyvale/examples/basics/ex1_1_basicscalars_therm2d.py +124 -113
- pyvale/examples/basics/ex1_2_sensormodel_therm2d.py +124 -132
- pyvale/examples/basics/ex1_3_customsens_therm3d.py +199 -195
- pyvale/examples/basics/ex1_4_basicerrors_therm3d.py +125 -121
- pyvale/examples/basics/ex1_5_fielderrs_therm3d.py +145 -141
- pyvale/examples/basics/ex1_6_caliberrs_therm2d.py +96 -101
- pyvale/examples/basics/ex1_7_spatavg_therm2d.py +109 -105
- pyvale/examples/basics/ex2_1_basicvectors_disp2d.py +92 -91
- pyvale/examples/basics/ex2_2_vectorsens_disp2d.py +96 -90
- pyvale/examples/basics/ex2_3_sensangle_disp2d.py +88 -89
- pyvale/examples/basics/ex2_4_chainfielderrs_disp2d.py +172 -171
- pyvale/examples/basics/ex2_5_vectorfields3d_disp3d.py +88 -86
- pyvale/examples/basics/ex3_1_basictensors_strain2d.py +90 -90
- pyvale/examples/basics/ex3_2_tensorsens2d_strain2d.py +93 -91
- pyvale/examples/basics/ex3_3_tensorsens3d_strain3d.py +172 -160
- pyvale/examples/basics/ex4_1_expsim2d_thermmech2d.py +154 -148
- pyvale/examples/basics/ex4_2_expsim3d_thermmech3d.py +249 -231
- pyvale/examples/dic/ex1_region_of_interest.py +98 -0
- pyvale/examples/dic/ex2_plate_with_hole.py +149 -0
- pyvale/examples/dic/ex3_plate_with_hole_strain.py +93 -0
- pyvale/examples/dic/ex4_dic_blender.py +95 -0
- pyvale/examples/dic/ex5_dic_challenge.py +102 -0
- pyvale/examples/imagedef2d/ex_imagedef2d_todisk.py +4 -2
- pyvale/examples/renderblender/ex1_1_blenderscene.py +152 -105
- pyvale/examples/renderblender/ex1_2_blenderdeformed.py +151 -100
- pyvale/examples/renderblender/ex2_1_stereoscene.py +183 -116
- pyvale/examples/renderblender/ex2_2_stereodeformed.py +185 -112
- pyvale/examples/renderblender/ex3_1_blendercalibration.py +164 -109
- pyvale/examples/renderrasterisation/ex_rastenp.py +74 -35
- pyvale/examples/renderrasterisation/ex_rastercyth_oneframe.py +6 -13
- pyvale/examples/renderrasterisation/ex_rastercyth_static_cypara.py +2 -2
- pyvale/examples/renderrasterisation/ex_rastercyth_static_pypara.py +2 -4
- pyvale/imagedef2d.py +3 -2
- pyvale/imagetools.py +137 -0
- pyvale/rastercy.py +34 -4
- pyvale/rasternp.py +300 -276
- pyvale/rasteropts.py +58 -0
- pyvale/renderer.py +47 -0
- pyvale/rendermesh.py +52 -62
- pyvale/renderscene.py +51 -0
- pyvale/sensorarrayfactory.py +2 -2
- pyvale/sensortools.py +19 -35
- pyvale/simcases/case21.i +1 -1
- pyvale/simcases/run_1case.py +8 -0
- pyvale/simtools.py +2 -2
- pyvale/visualsimplotter.py +180 -0
- {pyvale-2025.5.3.dist-info → pyvale-2025.7.0.dist-info}/METADATA +11 -57
- {pyvale-2025.5.3.dist-info → pyvale-2025.7.0.dist-info}/RECORD +93 -57
- {pyvale-2025.5.3.dist-info → pyvale-2025.7.0.dist-info}/WHEEL +1 -1
- pyvale/examples/visualisation/ex1_1_plot_traces.py +0 -102
- pyvale/examples/visualisation/ex2_1_animate_sim.py +0 -89
- {pyvale-2025.5.3.dist-info → pyvale-2025.7.0.dist-info}/licenses/LICENSE +0 -0
- {pyvale-2025.5.3.dist-info → pyvale-2025.7.0.dist-info}/top_level.txt +0 -0
pyvale/dicstrain.py
ADDED
|
@@ -0,0 +1,387 @@
|
|
|
1
|
+
# ================================================================================
|
|
2
|
+
# pyvale: the python validation engine
|
|
3
|
+
# License: MIT
|
|
4
|
+
# Copyright (C) 2025 The Computer Aided Validation Team
|
|
5
|
+
# ================================================================================
|
|
6
|
+
|
|
7
|
+
import matplotlib.pyplot as plt
|
|
8
|
+
import numpy as np
|
|
9
|
+
import glob
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
|
|
12
|
+
from pyvale import dic2dcpp
|
|
13
|
+
from pyvale import dicchecks
|
|
14
|
+
from pyvale.dicdataimport import dic_data_import
|
|
15
|
+
from pyvale.dicresults import DICResults
|
|
16
|
+
from pyvale.dicstrainresults import StrainResults
|
|
17
|
+
|
|
18
|
+
def strain_2d(data: str | Path,
              window_size: int=5,
              window_element: int=4,
              input_binary: bool=False,
              input_delimiter: str=",",
              output_basepath: Path | str="./",
              output_binary: bool=False,
              output_prefix: str="strain_",
              output_delimiter: str=",",
              output_at_end: bool=False,
              strain_formulation: str="HENCKY"):
    """
    Compute strain fields from DIC displacement data using a finite element
    smoothing approach.

    Validates the input parameters, loads DIC results from file, and passes
    the data to the C++-accelerated backend for strain computation. The
    backend writes the strain results to disk as a side effect.

    Parameters
    ----------
    data : pathlib.Path or str
        Path (or glob pattern) to the DIC result files to import.
    window_size : int, optional
        Size of the local window over which strain is computed (must be
        odd), by default 5.
    window_element : int, optional
        Finite element shape function used in the strain window: 4
        (bilinear) or 9 (biquadratic), by default 4.
    input_binary : bool, optional
        Whether the input data is binary rather than human-readable text
        (default: False).
    input_delimiter : str, optional
        Delimiter used in the input DIC result files (default: ",").
    output_basepath : str or pathlib.Path, optional
        Directory path where output files will be written (default: "./").
    output_binary : bool, optional
        Whether to write output in binary format (default: False).
    output_prefix : str, optional
        Prefix for all output files (default: "strain_"). Results are named
        output_prefix + original filename; the extension becomes ".csv" or
        ".dic2d" depending on whether output is binary.
    output_delimiter : str, optional
        Delimiter used in text output files (default: ",").
    output_at_end : bool, optional
        Passed through to the backend save configuration; presumably
        controls whether results are written only once all frames are
        processed — TODO confirm against the C++ backend.
    strain_formulation : str, optional
        Strain definition to use: one of 'GREEN', 'ALMANSI', 'HENCKY',
        'BIOT_EULER', 'BIOT_LAGRANGE'. Defaults to 'HENCKY'.

    Raises
    ------
    ValueError
        If any input parameter is invalid (unsupported strain formulation,
        even window size, or invalid element type).
    """
    allowed_formulations = ["GREEN", "ALMANSI", "HENCKY", "BIOT_EULER", "BIOT_LAGRANGE"]
    if strain_formulation not in allowed_formulations:
        raise ValueError(f"Invalid strain formulation: '{strain_formulation}'. "
                         f"Allowed values are: {', '.join(allowed_formulations)}.")

    allowed_elements = [4, 9]
    if window_element not in allowed_elements:
        raise ValueError(f"Invalid strain window element type: Q{window_element}. "
                         f"Allowed values are: {', '.join(map(str, allowed_elements))}.")

    if window_size % 2 == 0:
        raise ValueError(f"Invalid strain window size: '{window_size}'. Must be an odd number.")

    filenames = dicchecks.check_strain_files(strain_files=data)

    # Load data if a file path is given
    results = dic_data_import(layout="matrix", data=str(data),
                              binary=input_binary, delimiter=input_delimiter)

    # Extract dimensions from the validated object
    nss_x = results.ss_x.shape[1]
    nss_y = results.ss_y.shape[0]
    nimg = results.u.shape[0]

    dicchecks.check_output_directory(str(output_basepath), output_prefix)

    # Assigning C++ struct values for the save configuration
    strain_save_conf = dic2dcpp.SaveConfig()
    strain_save_conf.basepath = str(output_basepath)
    strain_save_conf.binary = output_binary
    strain_save_conf.prefix = output_prefix
    strain_save_conf.delimiter = output_delimiter
    strain_save_conf.at_end = output_at_end

    # Call to C++ backend (removed leftover debug print of filenames)
    dic2dcpp.strain_engine(results.ss_x, results.ss_y,
                           results.u, results.v,
                           nss_x, nss_y, nimg,
                           window_size, window_element,
                           strain_formulation, filenames,
                           strain_save_conf)
|
|
114
|
+
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
|
|
121
|
+
def strain_data_import(data: str | Path,
                       binary: bool = False,
                       layout: str = "matrix",
                       delimiter: str = " ") -> StrainResults:
    """
    Import strain result data from human readable text or binary files.

    Parameters
    ----------
    data : str or pathlib.Path
        Path pattern to the data files (can include wildcards).

    binary : bool, optional
        If True, expects files in a specific binary format. If False,
        expects text data. Default is False.

    layout : str, optional
        Format of the output data layout: "column" (flat array per frame)
        or "matrix" (reshaped grid per frame). Default is "matrix".

    delimiter : str, optional
        Delimiter used in text data files. Ignored if binary=True.
        Default is a single space.

    Returns
    -------
    StrainResults
        A named container with the following fields:
        - window_x, window_y (grid arrays if layout=="matrix"; otherwise,
          1D integer arrays)
        - def_grad, eps (deformation gradient and strain arrays with shape
          depending on layout)
        - filenames (python list)

    Raises
    ------
    ValueError:
        If `layout` is not "column" or "matrix", text data has insufficient
        columns, binary rows are malformed, or subset coordinates differ
        between frames.

    FileNotFoundError:
        If no matching data files are found.
    """
    # Fail fast on an invalid layout instead of silently treating any
    # unknown value as "column" (the docstring always promised this error).
    if layout not in ("column", "matrix"):
        raise ValueError(f"Invalid layout: '{layout}'. "
                         "Allowed values are: 'column', 'matrix'.")

    print("")
    print("Attempting Strain Data import...")
    print("")

    # glob requires a plain string pattern
    if isinstance(data, Path):
        data = str(data)

    files = sorted(glob.glob(data))
    filenames = files
    if not files:
        raise FileNotFoundError(f"No results found in: {data}")

    print(f"Found {len(files)} files containing Strain results:")
    for file in files:
        print(f" - {file}")
    print("")

    # Read first file to define reference coordinates
    read_data = read_binary if binary else read_text

    window_x_ref, window_y_ref, *fields = read_data(files[0], delimiter=delimiter)
    frames = [list(fields)]

    for file in files[1:]:
        window_x, window_y, *f = read_data(file, delimiter)
        if not (np.array_equal(window_x_ref, window_x) and
                np.array_equal(window_y_ref, window_y)):
            raise ValueError("Mismatch in coordinates across frames.")
        frames.append(f)

    # Stack each of the 8 scalar field components across frames
    arrays = [np.stack([frame[i] for frame in frames]) for i in range(8)]

    if layout == "matrix":
        # Convert flat columns to meshgrid-shaped arrays
        x_unique = np.unique(window_x_ref)
        y_unique = np.unique(window_y_ref)
        window_x_out, window_y_out = np.meshgrid(x_unique, y_unique)

        shape = (len(files), len(y_unique), len(x_unique))
        arrays = [to_grid(a, shape, window_x_ref, window_y_ref,
                          x_unique, y_unique) for a in arrays]
    else:
        window_x_out, window_y_out = window_x_ref, window_y_ref

    # Combine the 8 components into (..., 2, 2) tensor fields. Leading
    # dimensions are (file, y, x) for "matrix" layout and (file, point)
    # for "column" layout; ellipsis indexing covers both cases, so the
    # previously duplicated assembly code collapses into one path.
    base_shape = arrays[0].shape
    def_grad = np.zeros(base_shape + (2, 2))
    eps = np.zeros(base_shape + (2, 2))

    def_grad[..., 0, 0] = arrays[0]
    def_grad[..., 0, 1] = arrays[1]
    def_grad[..., 1, 0] = arrays[2]
    def_grad[..., 1, 1] = arrays[3]
    eps[..., 0, 0] = arrays[4]
    eps[..., 0, 1] = arrays[5]
    eps[..., 1, 0] = arrays[6]
    eps[..., 1, 1] = arrays[7]

    return StrainResults(window_x_out, window_y_out, def_grad, eps, filenames)
|
|
247
|
+
|
|
248
|
+
|
|
249
|
+
|
|
250
|
+
def read_binary(file: str, delimiter: str):
    """
    Read a binary Strain result file and extract DIC fields.

    Assumes a fixed binary structure with each row containing:
    - 2x int32 (subset coordinates)
    - 8x float64 (deformation matrix, strain matrix)
    i.e. 2*4 + 8*8 = 72 bytes per row, packed with no padding in native
    byte order.

    Parameters
    ----------
    file : str
        Path to the binary result file.

    delimiter : str
        Ignored for binary data (included for API consistency).

    Returns
    -------
    tuple of np.ndarray
        Arrays corresponding to:
        (window_x, window_y, def_grad00, def_grad01, def_grad10,
         def_grad11, eps00, eps01, eps10, eps11)

    Raises
    ------
    ValueError
        If the binary file size does not align with expected row size.
    """
    # Structured dtype describing one packed row. This fixes three bugs in
    # the previous byte-offset implementation: the row size was computed as
    # 3*4 + 6*8 = 60 bytes (the real layout is 72), the final eps component
    # was read at offset 72 instead of 64, and np.frombuffer was called on
    # non-contiguous column slices (which raises ValueError).
    row_dtype = np.dtype([("wx", np.int32),
                          ("wy", np.int32),
                          ("vals", np.float64, (8,))])

    with open(file, "rb") as f:
        raw = f.read()
    if len(raw) % row_dtype.itemsize != 0:
        raise ValueError("Binary file has incomplete rows.")

    rows = np.frombuffer(raw, dtype=row_dtype)
    window_x = rows["wx"]
    window_y = rows["wy"]
    vals = rows["vals"]

    # Column order on disk: def_grad (00, 01, 10, 11) then eps (00, 01, 10, 11)
    return (window_x, window_y,
            vals[:, 0], vals[:, 1], vals[:, 2], vals[:, 3],
            vals[:, 4], vals[:, 5], vals[:, 6], vals[:, 7])
|
|
303
|
+
|
|
304
|
+
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def read_text(file: str, delimiter: str):
    """
    Read a human-readable text Strain result file and extract DIC fields.

    Expects at least 10 columns:
    [window_x, window_y,
     def_grad00, def_grad01, def_grad10, def_grad11,
     eps00, eps01, eps10, eps11]
    The first row is assumed to be a header and is skipped.

    Parameters
    ----------
    file : str
        Path to the text result file.

    delimiter : str
        Delimiter used in the text file (e.g., space, tab, comma).

    Returns
    -------
    tuple of np.ndarray
        Arrays corresponding to:
        (window_x, window_y, def_grad00, def_grad01, def_grad10,
         def_grad11, eps00, eps01, eps10, eps11)

    Raises
    ------
    ValueError
        If the text file has fewer than 10 columns.
    """
    # ndmin=2 keeps a single-data-row file two-dimensional; previously it
    # came back 1-D and crashed on data.shape[1].
    data = np.loadtxt(file, delimiter=delimiter, skiprows=1, ndmin=2)
    # 10 columns are consumed below; the old check only required 9, which
    # let a 9-column file fail later with an opaque IndexError.
    if data.shape[1] < 10:
        raise ValueError("Text data must have at least 10 columns.")
    return (
        data[:, 0].astype(np.int32),  # window_x
        data[:, 1].astype(np.int32),  # window_y
        data[:, 2], data[:, 3], data[:, 4], data[:, 5],  # def_grad
        data[:, 6], data[:, 7], data[:, 8], data[:, 9]   # eps
    )
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
|
|
347
|
+
|
|
348
|
+
|
|
349
|
+
def to_grid(data, shape, ss_x_ref, ss_y_ref, x_unique, y_unique):
    """
    Reshape a 2D DIC field from flat (column) format into grid (matrix) format.

    This is used when output layout is specified as "matrix". Each point's
    value is placed at the grid cell matching its subset coordinates
    (ss_x_ref, ss_y_ref). Assumes every coordinate appears in
    x_unique / y_unique, which holds when the unique arrays were built with
    np.unique from the reference coordinates themselves.

    Parameters
    ----------
    data : np.ndarray
        Array of shape (n_frames, n_points) to be reshaped into
        (n_frames, height, width).

    shape : tuple
        Target shape of output array: (n_frames, height, width).

    ss_x_ref : np.ndarray
        X coordinates of subset centers.

    ss_y_ref : np.ndarray
        Y coordinates of subset centers.

    x_unique : np.ndarray
        Sorted unique X coordinates in the grid.

    y_unique : np.ndarray
        Sorted unique Y coordinates in the grid.

    Returns
    -------
    np.ndarray
        Reshaped array with shape `shape`, filled with NaNs where no data
        exists.
    """
    grid = np.full(shape, np.nan)
    # Vectorised index lookup: x_unique / y_unique are sorted, so
    # searchsorted maps every coordinate to its grid column/row in one
    # O(n log m) pass instead of an O(n*m) np.where call per point.
    x_idx = np.searchsorted(x_unique, ss_x_ref)
    y_idx = np.searchsorted(y_unique, ss_y_ref)
    grid[:, y_idx, x_idx] = data
    return grid
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
# ================================================================================
|
|
2
|
+
# pyvale: the python validation engine
|
|
3
|
+
# License: MIT
|
|
4
|
+
# Copyright (C) 2025 The Computer Aided Validation Team
|
|
5
|
+
# ================================================================================
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
from dataclasses import dataclass
|
|
9
|
+
import numpy as np
|
|
10
|
+
|
|
11
|
+
@dataclass(slots=True)
|
|
12
|
+
class StrainResults:
|
|
13
|
+
"""
|
|
14
|
+
Data container for Strain analysis results.
|
|
15
|
+
|
|
16
|
+
This dataclass stores the strain window coordinates, deformation gradient
|
|
17
|
+
and strain values.
|
|
18
|
+
|
|
19
|
+
Attributes
|
|
20
|
+
----------
|
|
21
|
+
window_x : np.ndarray
|
|
22
|
+
The x-coordinates of the strain window centre.
|
|
23
|
+
window_y : np.ndarray
|
|
24
|
+
The y-coordinates of the strain window centre.
|
|
25
|
+
def_grad : np.ndarray
|
|
26
|
+
The 2D deformation gradient.
|
|
27
|
+
eps : np.ndarray
|
|
28
|
+
The 2D strain tensor.
|
|
29
|
+
filenames : list[str]
|
|
30
|
+
name of Strain result files that have been found
|
|
31
|
+
"""
|
|
32
|
+
|
|
33
|
+
window_x: np.ndarray
|
|
34
|
+
window_y: np.ndarray
|
|
35
|
+
def_grad: np.ndarray
|
|
36
|
+
eps: np.ndarray
|
|
37
|
+
filenames: list[str]
|
pyvale/errorintegrator.py
CHANGED
|
@@ -177,14 +177,16 @@ class ErrIntegrator:
|
|
|
177
177
|
Array of total errors summed over all errors in the chain. shape=(
|
|
178
178
|
num_sensors,num_field_components,num_time_steps).
|
|
179
179
|
"""
|
|
180
|
-
|
|
180
|
+
self._errs_total = np.zeros_like(truth)
|
|
181
|
+
self._errs_systematic = np.zeros_like(truth)
|
|
182
|
+
self._errs_random = np.zeros_like(truth)
|
|
181
183
|
self._errs_by_chain = np.zeros((len(self._err_chain),) + \
|
|
182
184
|
self._meas_shape)
|
|
183
185
|
|
|
184
186
|
for ii,ee in enumerate(self._err_chain):
|
|
185
187
|
|
|
186
188
|
if ee.get_error_dep() == EErrDep.DEPENDENT:
|
|
187
|
-
(error_array,sens_data) = ee.calc_errs(truth+
|
|
189
|
+
(error_array,sens_data) = ee.calc_errs(truth+self._errs_total,
|
|
188
190
|
self._sens_data_accumulated)
|
|
189
191
|
|
|
190
192
|
else:
|
|
@@ -199,10 +201,9 @@ class ErrIntegrator:
|
|
|
199
201
|
else:
|
|
200
202
|
self._errs_random = self._errs_random + error_array
|
|
201
203
|
|
|
202
|
-
|
|
204
|
+
self._errs_total = self._errs_total + error_array
|
|
203
205
|
self._errs_by_chain[ii,:,:,:] = error_array
|
|
204
206
|
|
|
205
|
-
self._errs_total = accumulated_error
|
|
206
207
|
return self._errs_total
|
|
207
208
|
|
|
208
209
|
|
|
@@ -226,13 +227,15 @@ class ErrIntegrator:
|
|
|
226
227
|
Array of total errors summed over all errors in the chain. shape=(
|
|
227
228
|
num_sensors,num_field_components,num_time_steps).
|
|
228
229
|
"""
|
|
229
|
-
|
|
230
|
+
self._errs_total = np.zeros_like(truth)
|
|
231
|
+
self._errs_systematic = np.zeros_like(truth)
|
|
232
|
+
self._errs_random = np.zeros_like(truth)
|
|
230
233
|
|
|
231
234
|
for ee in self._err_chain:
|
|
232
235
|
|
|
233
236
|
if ee.get_error_dep() == EErrDep.DEPENDENT:
|
|
234
237
|
(error_array,sens_data) = ee.calc_errs(
|
|
235
|
-
truth+
|
|
238
|
+
truth+self._errs_total,
|
|
236
239
|
self._sens_data_accumulated
|
|
237
240
|
)
|
|
238
241
|
else:
|
|
@@ -246,9 +249,8 @@ class ErrIntegrator:
|
|
|
246
249
|
else:
|
|
247
250
|
self._errs_random = self._errs_random + error_array
|
|
248
251
|
|
|
249
|
-
|
|
252
|
+
self._errs_total = self._errs_total + error_array
|
|
250
253
|
|
|
251
|
-
self._errs_total = accumulated_error
|
|
252
254
|
return self._errs_total
|
|
253
255
|
|
|
254
256
|
|