rock-physics-open 0.3.2 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rock_physics_open/__init__.py +0 -0
- rock_physics_open/equinor_utilities/__init__.py +0 -0
- rock_physics_open/equinor_utilities/anisotropy.py +211 -0
- rock_physics_open/equinor_utilities/classification_functions/__init__.py +17 -0
- rock_physics_open/equinor_utilities/classification_functions/class_stats.py +68 -0
- rock_physics_open/equinor_utilities/classification_functions/lin_class.py +53 -0
- rock_physics_open/equinor_utilities/classification_functions/mahal_class.py +63 -0
- rock_physics_open/equinor_utilities/classification_functions/norm_class.py +73 -0
- rock_physics_open/equinor_utilities/classification_functions/poly_class.py +45 -0
- rock_physics_open/equinor_utilities/classification_functions/post_prob.py +27 -0
- rock_physics_open/equinor_utilities/classification_functions/two_step_classification.py +60 -0
- rock_physics_open/equinor_utilities/conversions.py +10 -0
- rock_physics_open/equinor_utilities/gen_utilities/__init__.py +11 -0
- rock_physics_open/equinor_utilities/gen_utilities/dict_to_float.py +38 -0
- rock_physics_open/equinor_utilities/gen_utilities/dim_check_vector.py +113 -0
- rock_physics_open/equinor_utilities/gen_utilities/filter_input.py +131 -0
- rock_physics_open/equinor_utilities/gen_utilities/filter_output.py +88 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/__init__.py +15 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/base_pressure_model.py +170 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/dummy_vars.py +53 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/exponential_model.py +137 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/import_ml_models.py +77 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/polynomial_model.py +132 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/run_regression.py +209 -0
- rock_physics_open/equinor_utilities/machine_learning_utilities/sigmoidal_model.py +241 -0
- rock_physics_open/equinor_utilities/optimisation_utilities/__init__.py +19 -0
- rock_physics_open/equinor_utilities/optimisation_utilities/opt_subst_utilities.py +455 -0
- rock_physics_open/equinor_utilities/snapshot_test_utilities/__init__.py +10 -0
- rock_physics_open/equinor_utilities/snapshot_test_utilities/compare_snapshots.py +184 -0
- rock_physics_open/equinor_utilities/snapshot_test_utilities/snapshots.py +97 -0
- rock_physics_open/equinor_utilities/std_functions/__init__.py +43 -0
- rock_physics_open/equinor_utilities/std_functions/backus_ave.py +68 -0
- rock_physics_open/equinor_utilities/std_functions/dvorkin_nur.py +77 -0
- rock_physics_open/equinor_utilities/std_functions/gassmann.py +165 -0
- rock_physics_open/equinor_utilities/std_functions/hashin_shtrikman.py +224 -0
- rock_physics_open/equinor_utilities/std_functions/hertz_mindlin.py +51 -0
- rock_physics_open/equinor_utilities/std_functions/moduli_velocity.py +67 -0
- rock_physics_open/equinor_utilities/std_functions/reflection_eq.py +120 -0
- rock_physics_open/equinor_utilities/std_functions/rho.py +69 -0
- rock_physics_open/equinor_utilities/std_functions/voigt_reuss_hill.py +149 -0
- rock_physics_open/equinor_utilities/std_functions/walton.py +45 -0
- rock_physics_open/equinor_utilities/std_functions/wood_brie.py +94 -0
- rock_physics_open/equinor_utilities/various_utilities/Equinor_logo.gif +0 -0
- rock_physics_open/equinor_utilities/various_utilities/Equinor_logo.ico +0 -0
- rock_physics_open/equinor_utilities/various_utilities/__init__.py +24 -0
- rock_physics_open/equinor_utilities/various_utilities/display_result_statistics.py +90 -0
- rock_physics_open/equinor_utilities/various_utilities/gassmann_dry_mod.py +56 -0
- rock_physics_open/equinor_utilities/various_utilities/gassmann_mod.py +56 -0
- rock_physics_open/equinor_utilities/various_utilities/gassmann_sub_mod.py +64 -0
- rock_physics_open/equinor_utilities/various_utilities/hs_average.py +59 -0
- rock_physics_open/equinor_utilities/various_utilities/pressure.py +96 -0
- rock_physics_open/equinor_utilities/various_utilities/reflectivity.py +101 -0
- rock_physics_open/equinor_utilities/various_utilities/timeshift.py +104 -0
- rock_physics_open/equinor_utilities/various_utilities/vp_vs_rho_set_statistics.py +170 -0
- rock_physics_open/equinor_utilities/various_utilities/vrh_3_min.py +83 -0
- rock_physics_open/fluid_models/__init__.py +9 -0
- rock_physics_open/fluid_models/brine_model/__init__.py +5 -0
- rock_physics_open/fluid_models/brine_model/brine_properties.py +178 -0
- rock_physics_open/fluid_models/gas_model/__init__.py +5 -0
- rock_physics_open/fluid_models/gas_model/gas_properties.py +319 -0
- rock_physics_open/fluid_models/oil_model/__init__.py +5 -0
- rock_physics_open/fluid_models/oil_model/dead_oil_density.py +65 -0
- rock_physics_open/fluid_models/oil_model/dead_oil_velocity.py +30 -0
- rock_physics_open/fluid_models/oil_model/live_oil_density.py +82 -0
- rock_physics_open/fluid_models/oil_model/live_oil_velocity.py +24 -0
- rock_physics_open/fluid_models/oil_model/oil_bubble_point.py +69 -0
- rock_physics_open/fluid_models/oil_model/oil_properties.py +146 -0
- rock_physics_open/sandstone_models/__init__.py +59 -0
- rock_physics_open/sandstone_models/cemented_shalysand_sandyshale_models.py +304 -0
- rock_physics_open/sandstone_models/constant_cement_models.py +204 -0
- rock_physics_open/sandstone_models/constant_cement_optimisation.py +125 -0
- rock_physics_open/sandstone_models/contact_cement_model.py +138 -0
- rock_physics_open/sandstone_models/curvefit_sandstone_models.py +143 -0
- rock_physics_open/sandstone_models/friable_models.py +177 -0
- rock_physics_open/sandstone_models/friable_optimisation.py +115 -0
- rock_physics_open/sandstone_models/friable_shalysand_sandyshale_models.py +235 -0
- rock_physics_open/sandstone_models/patchy_cement_fluid_substitution_model.py +477 -0
- rock_physics_open/sandstone_models/patchy_cement_model.py +384 -0
- rock_physics_open/sandstone_models/patchy_cement_optimisation.py +254 -0
- rock_physics_open/sandstone_models/unresolved_cemented_sandshale_models.py +134 -0
- rock_physics_open/sandstone_models/unresolved_friable_sandshale_models.py +126 -0
- rock_physics_open/shale_models/__init__.py +19 -0
- rock_physics_open/shale_models/dem.py +174 -0
- rock_physics_open/shale_models/dem_dual_por.py +61 -0
- rock_physics_open/shale_models/kus_tok.py +59 -0
- rock_physics_open/shale_models/multi_sca.py +133 -0
- rock_physics_open/shale_models/pq.py +102 -0
- rock_physics_open/shale_models/sca.py +90 -0
- rock_physics_open/shale_models/shale4_mineral.py +147 -0
- rock_physics_open/shale_models/shale4_mineral_dem_overlay.py +92 -0
- rock_physics_open/span_wagner/__init__.py +5 -0
- rock_physics_open/span_wagner/co2_properties.py +444 -0
- rock_physics_open/span_wagner/coefficients.py +165 -0
- rock_physics_open/span_wagner/equations.py +104 -0
- rock_physics_open/span_wagner/tables/__init__.py +0 -0
- rock_physics_open/span_wagner/tables/carbon_dioxide_density.npz +0 -0
- rock_physics_open/span_wagner/tables/lookup_table.py +33 -0
- rock_physics_open/t_matrix_models/Equinor_logo.ico +0 -0
- rock_physics_open/t_matrix_models/__init__.py +35 -0
- rock_physics_open/t_matrix_models/carbonate_pressure_substitution.py +124 -0
- rock_physics_open/t_matrix_models/curvefit_t_matrix_exp.py +123 -0
- rock_physics_open/t_matrix_models/curvefit_t_matrix_min.py +86 -0
- rock_physics_open/t_matrix_models/parse_t_matrix_inputs.py +297 -0
- rock_physics_open/t_matrix_models/run_t_matrix.py +243 -0
- rock_physics_open/t_matrix_models/t_matrix_C.py +210 -0
- rock_physics_open/t_matrix_models/t_matrix_opt_fluid_sub_exp.py +137 -0
- rock_physics_open/t_matrix_models/t_matrix_opt_fluid_sub_petec.py +167 -0
- rock_physics_open/t_matrix_models/t_matrix_opt_forward_model_exp.py +76 -0
- rock_physics_open/t_matrix_models/t_matrix_opt_forward_model_min.py +89 -0
- rock_physics_open/t_matrix_models/t_matrix_parameter_optimisation_exp.py +176 -0
- rock_physics_open/t_matrix_models/t_matrix_parameter_optimisation_min.py +162 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/__init__.py +12 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/array_functions.py +75 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_c_eff.py +163 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_isolated.py +95 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_kd.py +40 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_kd_eff.py +116 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_kd_uuv.py +18 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_pressure.py +140 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_t.py +71 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_td.py +42 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_theta.py +43 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_x.py +33 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/calc_z.py +50 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/check_and_tile.py +43 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/g_tensor.py +140 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/iso_av.py +60 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/iso_ave_all.py +55 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/pressure_input.py +44 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/t_matrix_vec.py +278 -0
- rock_physics_open/t_matrix_models/t_matrix_vector/velocity_vti_angles.py +81 -0
- rock_physics_open/t_matrix_models/tmatrix_python.dll +0 -0
- rock_physics_open/t_matrix_models/tmatrix_python.so +0 -0
- rock_physics_open/ternary_plots/__init__.py +3 -0
- rock_physics_open/ternary_plots/gen_ternary_plot.py +73 -0
- rock_physics_open/ternary_plots/shale_prop_ternary.py +337 -0
- rock_physics_open/ternary_plots/ternary_patches.py +277 -0
- rock_physics_open/ternary_plots/ternary_plot_utilities.py +197 -0
- rock_physics_open/ternary_plots/unconventionals_ternary.py +75 -0
- rock_physics_open/version.py +34 -0
- rock_physics_open-0.3.2.dist-info/METADATA +90 -0
- rock_physics_open-0.3.2.dist-info/RECORD +145 -0
- rock_physics_open-0.3.2.dist-info/WHEEL +5 -0
- rock_physics_open-0.3.2.dist-info/licenses/LICENSE +165 -0
- rock_physics_open-0.3.2.dist-info/top_level.txt +1 -0
rock_physics_open/equinor_utilities/optimisation_utilities/opt_subst_utilities.py

@@ -0,0 +1,455 @@
import os
import pickle
import sys
from pathlib import Path
from typing import Any, Callable, Literal, cast

import numpy as np
import numpy.typing as npt
from scipy.optimize import curve_fit

OptCallable = Callable[[npt.NDArray[np.float64]], npt.NDArray[np.float64]]


def curve_fit_wrapper(
    x_init: npt.NDArray[np.float64],
    opt_func: OptCallable,
    x_data: npt.NDArray[np.float64],
    y_data: npt.NDArray[np.float64],
    *args: Any,
    **opt_kwargs: Any,
) -> npt.NDArray[np.float64]:
    """Objective function for use in tests with scipy.optimize.minimize instead of curve_fit.

    Parameters
    ----------
    x_init : np.ndarray
        Initial guess for parameters.
    opt_func : callable
        Function to optimise.
    x_data : np.ndarray
        Input data to opt_func.
    y_data : np.ndarray
        Results that the optimisation should match.
    args :
        Additional positional arguments passed on to opt_func.
    opt_kwargs :
        Additional keyword arguments passed on to opt_func.

    Returns
    -------
    np.float64
        Sum of absolute residuals between y_data and the model prediction.
    """
    y_pred = opt_func(x_data, *x_init, *args, **opt_kwargs)
    return np.sum(np.sqrt((y_data - y_pred) ** 2))


def gen_opt_routine(
    opt_function: OptCallable,
    x_data_orig: npt.NDArray[np.float64],
    y_data: npt.NDArray[np.float64],
    x_init: npt.NDArray[np.float64],
    low_bound: npt.NDArray[np.float64],
    high_bound: npt.NDArray[np.float64],
    **opt_kwargs: Any,
) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64], npt.NDArray[np.float64]]:
    """Lean routine for running an optimisation of opt_function through scipy's curve_fit.

    Predicted values, residuals relative to the observed values and the optimal
    parameters are returned.

    Parameters
    ----------
    opt_function : callable
        Function to optimise.
    x_data_orig : np.ndarray
        Input data to the function - independent variables.
    y_data : np.ndarray
        Results that the optimisation should match - dependent variables.
    x_init : np.ndarray
        Initial guess for parameters.
    low_bound : np.ndarray
        Parameter low bound.
    high_bound : np.ndarray
        Parameter high bound.
    opt_kwargs : dict
        Optional meta-parameters to the optimisation function.

    Returns
    -------
    tuple
        y_pred, y_res, opt_params : (np.ndarray, np.ndarray, np.ndarray).
        y_pred : predicted values,
        y_res : residual values,
        opt_params : optimal model parameters.
    """
    try:
        opt_params, _ = cast(
            tuple[npt.NDArray[np.float64], Any],
            curve_fit(
                opt_function,
                x_data_orig,
                y_data.flatten("F"),
                x_init,
                bounds=(low_bound, high_bound),
                method="trf",
                loss="soft_l1",
                **opt_kwargs,
            ),
        )
    except ValueError:
        raise ValueError(
            "gen_opt_routine: failed in optimisation step: {}".format(
                str(sys.exc_info())
            )
        )
    else:
        y_pred = np.reshape(
            opt_function(x_data_orig, *opt_params), y_data.shape, order="F"
        )
        y_res = y_pred - y_data

    # Alternative implementation, not shown to improve results
    # alt_opt_params = minimize(curve_fit_wrapper, x_init, args=(opt_function, x_data_orig, y_data.flatten('F')),
    #                           bounds=Bounds(low_bound, high_bound), method='SLSQP', options={'maxiter': 10000})
    # y_pred_1 = np.reshape(opt_function(x_data_orig, *alt_opt_params['x'], **opt_kwargs), y_data.shape, order='F')
    # y_res_1 = y_pred_1 - y_data
    # return y_pred_1, y_res_1, alt_opt_params['x']

    return y_pred, y_res, opt_params


def gen_mod_routine(
    opt_function: OptCallable,
    xdata_orig: npt.NDArray[np.float64],
    ydata_shape: tuple[int, int],
    opt_params: npt.NDArray[np.float64],
) -> npt.NDArray[np.float64]:
    """Predict modelled values based on an earlier optimisation run for optimal model parameters.

    Parameters
    ----------
    opt_function : callable
        Function to optimise.
    xdata_orig : np.ndarray
        Input data to the function - independent variables.
    ydata_shape : (int, int)
        Shape of y_data.
    opt_params : np.ndarray
        Optimal model parameters.

    Returns
    -------
    np.ndarray
        Predicted values.
    """
    # Estimation of values
    return np.reshape(opt_function(xdata_orig, *opt_params), ydata_shape, order="F")


def gen_sub_routine(
    opt_function: OptCallable,
    xdata_orig: npt.NDArray[np.float64],
    xdata_new: npt.NDArray[np.float64],
    ydata: npt.NDArray[np.float64],
    opt_params: npt.NDArray[np.float64],
) -> tuple[npt.NDArray[np.float64], npt.NDArray[np.float64], npt.NDArray[np.float64]]:
    """General substitution function based on a calibrated/optimised model and two sets of input parameters.

    The substituted values are calculated as the original observations plus the
    difference between the two modelling steps.

    Parameters
    ----------
    opt_function : callable
        Function to optimise.
    xdata_orig : np.ndarray
        Input data to the function step 1 - independent variables.
    xdata_new : np.ndarray
        Input data to the function step 2 - independent variables.
    ydata : np.ndarray
        Original observed values step 1.
    opt_params : np.ndarray
        Set of optimal parameters to the model.

    Returns
    -------
    tuple
        y_final, y_pred, y_res : (np.ndarray, np.ndarray, np.ndarray).
        y_final : original observed data plus the difference in estimation between the two steps,
        y_pred : modelled data for the original inputs,
        y_res : residuals between modelled and observed data.
    """
    # Estimation of initial values
    y_pred = np.reshape(opt_function(xdata_orig, *opt_params), ydata.shape, order="F")
    # Estimation step for substituted fluid properties
    y_subst = np.reshape(opt_function(xdata_new, *opt_params), ydata.shape, "F")

    y_res = y_pred - ydata
    y_diff = y_subst - y_pred
    y_final = ydata + y_diff

    return y_final, y_pred, y_res


def save_opt_params(
    opt_type: str,
    opt_params: np.ndarray,
    file_name: str = "opt_params.pkl",
    well_name: str = "Unknown well",
):
    """Utility to save optimal parameters as a pickle file in a more readable format so that the optimisation method can be recognised.

    Parameters
    ----------
    opt_type : str
        String defining optimisation type.
    opt_params : np.ndarray
        Numpy array with parameters from optimisation.
    file_name : str, optional
        File to save results to, by default 'opt_params.pkl'.
    well_name : str, optional
        Name of the well which is used in optimisation, by default 'Unknown well'.

    Raises
    ------
    ValueError
        If unknown optimisation opt_type.
    """
    # Save the optimal parameters with info
    if opt_type == "min":  # optimisation with mineral input from well
        opt_param_dict = {
            "well_name": well_name,
            "opt_ver": opt_type,
            "f_ani": opt_params[0],
            "f_con": opt_params[1],
            "alpha_opt": opt_params[2:4],
            "v_opt": opt_params[4],
            "opt_vec": opt_params,
        }
    elif opt_type == "exp":
        opt_param_dict = {
            "well_name": well_name,
            "opt_ver": opt_type,
            "f_ani": opt_params[0],
            "f_con": opt_params[1],
            "alpha_opt": opt_params[2:4],
            "v_opt": opt_params[4],
            "k_carb": opt_params[5],
            "mu_carb": opt_params[6],
            "rho_carb": opt_params[7],
            "k_sh": opt_params[8],
            "mu_sh": opt_params[9],
            "rho_sh": opt_params[10],
            "opt_vec": opt_params,
        }
    elif opt_type == "pat_cem":
        opt_param_dict = {
            "well_name": well_name,
            "opt_ver": opt_type,
            "weight_k": opt_params[0],
            "weight_mu": opt_params[1],
            "shear_red": opt_params[2],
            "frac_cem": opt_params[3],
            "opt_vec": opt_params,
        }
    elif opt_type == "const_cem":
        opt_param_dict = {
            "well_name": well_name,
            "opt_ver": opt_type,
            "phi_c": opt_params[0],
            "shear_red": opt_params[1],
            "frac_cem": opt_params[2],
            "opt_vec": opt_params,
        }
    elif opt_type == "friable":
        opt_param_dict = {
            "well_name": well_name,
            "opt_ver": opt_type,
            "phi_c": opt_params[0],
            "shear_red": opt_params[1],
            "opt_vec": opt_params,
        }
    else:
        raise ValueError(
            "save_opt_params: unknown optimisation opt_type: {}".format(opt_type)
        )

    with open(file_name, "wb") as file_out:
        pickle.dump(opt_param_dict, file_out)


def opt_param_info():
    """Return hard-coded translation dictionaries for the optimised parameters.

    Returns
    -------
    tuple
        parameter_translation_dict, value_translation_dict, type_translation_dict.
    """
    parameter_translation_dict = {
        "opt_ver": "Optimisation version",
        "no_incl_sets": "Number of inclusion sets",
        "ang_sym": "Angle of symmetry plane [°]",
        "f_ani": "Fraction of anisotropic inclusions",
        "f_con": "Fraction of connected inclusions",
        "alpha_opt": "Optimal aspect ratios for inclusion sets",
        "v_opt": "Ratio of volume for inclusion sets",
        "k_carb": "Matrix (carbonate) bulk modulus [Pa]",
        "mu_carb": "Matrix (carbonate) shear modulus [Pa]",
        "rho_carb": "Matrix (carbonate) density [kg/m^3]",
        "k_sh": "Mud/shale bulk modulus [Pa]",
        "mu_sh": "Mud/shale shear modulus [Pa]",
        "rho_sh": "Mud/shale density [kg/m^3]",
        "k_sst": "Sst bulk modulus [Pa]",
        "mu_sst": "Sst shear modulus [Pa]",
        "rho_sst": "Sst density [kg/m^3]",
        "frac_cem": "Cement fraction [fraction]",
        "phi_c": "Critical porosity [fraction]",
        "shear_red": "Reduction in tangential friction [fraction]",
        "weight_k": "Bulk modulus weight for constant cement model",
        "weight_mu": "Shear modulus weight for constant cement model",
    }
    value_translation_dict = {
        "ang_sym": 90.0,
        "k_carb": 95.0e9,
        "mu_carb": 45.0e9,
        "rho_carb": 2950.0,
        "k_sh": 35.0e9,
        "mu_sh": 20.0e9,
        "rho_sh": 2750.0,
        "k_sst": 45.0e9,
        "mu_sst": 50.0e9,
        "rho_sst": 2750.0,
    }
    type_translation_dict = {
        "min": "PETEC (Mineral input) optimisation",
        "exp": "Exploration type optimisation",
        "pat_cem": "Patchy cement model",
        "const_cem": "Constant cement model",
        "friable": "Friable sand model",
    }
    return parameter_translation_dict, value_translation_dict, type_translation_dict


def load_opt_params(file_name: str):
    """Utility to load a parameter file from an optimisation run.

    Parameters
    ----------
    file_name : str
        Input file name including path.

    Returns
    -------
    tuple
        opt_type : model type, opt_param : array with all parameters for the model,
        opt_dict : full parameter dictionary as stored by save_opt_params.
    """
    with open(file_name, "rb") as fin:
        param_dict = pickle.load(fin)
    opt_type = param_dict["opt_ver"]
    opt_param = param_dict["opt_vec"]
    opt_dict = param_dict

    return opt_type, opt_param, opt_dict


def opt_param_to_ascii(
    in_file: str | Path,
    display_results: bool = True,
    out_file: str | Path | None = None,
    well_name: str = "Unknown well",
    **kwargs: Any,
) -> None:
    """Convert stored optimised parameters to ASCII and display the results or save them to file.

    Parameters
    ----------
    in_file : str or Path
        File name for stored optimised parameters.
    display_results : bool
        Display results on screen, default True.
    out_file : str or Path or None
        Optionally store the optimised parameters in an ASCII file.
    well_name : str
        Optional name of the well that is used in optimisation.
    """
    with open(in_file, "rb") as f_in:
        param_dict = pickle.load(f_in)
    if well_name.lower() == "unknown well":
        well_name = param_dict.pop("well_name", "Unknown Well")

    (
        parameter_translation_dict,
        value_translation_dict,
        type_translation_dict,
    ) = opt_param_info()

    item: list[str] = []
    value: list[str] = []
    disp_string = ""
    for opt_key, opt_value in param_dict.items():
        if opt_key in parameter_translation_dict:
            if opt_key in value_translation_dict:
                opt_value = opt_value * value_translation_dict[opt_key]
                opt_str = f" {opt_value:.4f}"
            elif opt_key == "opt_ver":
                opt_str = type_translation_dict[opt_value]
            elif opt_key == "v_opt":
                opt_value = np.append(opt_value, 1.0 - np.sum(opt_value))
                opt_str = f" {opt_value:}"
            else:
                if isinstance(opt_value, float):
                    opt_str = f" {opt_value:.4f}"
                else:
                    opt_str = f" {opt_value:}"
            item.append(f"{parameter_translation_dict[opt_key]}: ")
            value.append(opt_str)
            disp_string += f"{parameter_translation_dict[opt_key]}: {opt_str}\n"
    info_array = np.stack((item, value), axis=1)

    if display_results:
        from tkinter import END, Entry, Tk

        class Table:
            def __init__(
                self,
                tk_root: Tk,
                no_rows: int,
                no_cols: int,
                info: npt.NDArray[Any],
            ):
                # Code for creating the table of parameter names and values
                str_len = np.vectorize(len)
                text_justify: list[Literal["right", "left"]] = ["right", "left"]
                for i in range(no_rows):
                    for j in range(no_cols):
                        just = text_justify[0] if j == 0 else text_justify[1]
                        max_len = np.max(str_len(info[:, j]))
                        self.e: Entry = Entry(
                            tk_root,
                            width=max_len + 2,
                            fg="black",
                            font=("Consolas", 11, "normal"),
                            justify=just,
                        )
                        self.e.grid(row=i, column=j)
                        self.e.insert(END, info[i][j])

        root = Tk(**kwargs)
        if well_name.lower() == "unknown well":
            root.title("T Matrix Optimised Parameters")
        else:
            root.title(well_name)
        if sys.platform.startswith("win"):
            ico_file = os.path.join(os.path.dirname(__file__), "Equinor_logo.ico")
            root.iconbitmap(ico_file)
        _ = Table(root, info_array.shape[0], info_array.shape[1], info_array)
        _ = root.attributes("-topmost", True)
        root.mainloop()

    if out_file is not None:
        with open(out_file, "w") as f_out:
            _ = f_out.write(disp_string)

    return
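The optimisation helpers above are generic: any model with a curve_fit-style signature can be calibrated with gen_opt_routine, re-used for two-step substitution with gen_sub_routine, and have its parameters written to and read back from a pickle file. Below is a minimal usage sketch, assuming rock-physics-open 0.3.2 is installed; the toy_model, the synthetic data and the file name are invented for illustration and are not part of the package.

# Illustrative sketch only - not part of the released package
import numpy as np

from rock_physics_open.equinor_utilities.optimisation_utilities.opt_subst_utilities import (
    gen_opt_routine,
    gen_sub_routine,
    load_opt_params,
    save_opt_params,
)


def toy_model(x, a, b):
    # curve_fit-style signature: independent variable first, then free parameters
    return a * x + b


rng = np.random.default_rng(0)
x_obs = np.linspace(0.0, 1.0, 50)
y_obs = 2.0 * x_obs + 1.0 + 0.05 * rng.standard_normal(x_obs.size)

# Calibrate the toy model within the given parameter bounds
y_pred, y_res, opt_params = gen_opt_routine(
    toy_model,
    x_obs,
    y_obs,
    x_init=np.array([1.0, 0.0]),
    low_bound=np.array([-10.0, -10.0]),
    high_bound=np.array([10.0, 10.0]),
)

# Two-step substitution: observed data plus the modelled difference
x_new = x_obs + 0.1
y_final, _, _ = gen_sub_routine(toy_model, x_obs, x_new, y_obs, opt_params)

# Round-trip the calibrated parameters through the pickle utilities
save_opt_params("friable", opt_params, file_name="toy_opt_params.pkl", well_name="Demo well")
opt_type, opt_vec, opt_dict = load_opt_params("toy_opt_params.pkl")

The "friable" label maps opt_params[0] and opt_params[1] to phi_c and shear_red, which is only physically meaningful for the real sandstone models; with the toy parameters it merely demonstrates the save/load round trip.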

rock_physics_open/equinor_utilities/snapshot_test_utilities/compare_snapshots.py

@@ -0,0 +1,184 @@
import inspect
import math
import re
from typing import Any
from warnings import warn

import numpy as np
import numpy.typing as npt
import pandas as pd

from rock_physics_open.equinor_utilities.various_utilities import disp_result_stats

from .snapshots import get_snapshot_name


def compare_snapshots(
    test_results: tuple[npt.NDArray[np.float64] | pd.DataFrame]
    | npt.NDArray[np.float64]
    | pd.DataFrame,
    saved_results: tuple[npt.NDArray[np.float64] | pd.DataFrame],
    name_arr: list[str] | None = None,
    display_results: bool = False,
) -> bool:
    """Compare newly generated test results with saved snapshot results.

    Arrays and data frames are compared element by element with a relative
    tolerance of 1 %, NaN values counting as equal. Differences trigger a
    warning and are written to a log file derived from the snapshot name.

    Returns
    -------
    bool
        True if no differences beyond the tolerance were found.
    """
    test_results = _validate_input(test_results, saved_results)

    if display_results:
        title = str(inspect.stack()[1].function)
        if not name_arr:
            name_arr = [f"arr_{i}" for i in range(len(test_results))]
        disp_result_stats(title, test_results, name_arr)

    r_tol = 0.01
    equal_nan = True
    no_difference_found = True

    for i, (test_item, saved_item) in enumerate(zip(test_results, saved_results)):
        try:
            if name_arr:
                err_msg = f"saved and generated result for {name_arr[i]} differ"
            else:
                err_msg = f"saved result for variable {i} differs"
            np.testing.assert_allclose(
                test_item,
                saved_item,
                rtol=r_tol,
                equal_nan=equal_nan,
                err_msg=err_msg,
            )
        except AssertionError as error:
            open_mode = "w" if no_difference_found else "a"
            no_difference_found = False
            warn(f"comparison test failed for item {i}: {error}")
            log_file = re.sub("npz", "log", get_snapshot_name(step=2))

            with open(log_file, open_mode) as file:
                _ = file.write(
                    f"Test function: {get_snapshot_name(include_extension=False, include_snapshot_dir=False, include_filename=False)} \n"
                )
                if name_arr:
                    _ = file.write(f"Test variable: {name_arr[i]} \n")
                else:
                    _ = file.write(f"Test variable number: {i} \n")

                for line in str(error).splitlines():
                    mismatched_elements_index = (
                        line.replace(" ", "").lower().find("mismatchedelements")
                    )
                    if mismatched_elements_index != -1:
                        _ = file.write(line + "\n")
                        continue

                    max_abs_diff_index = (
                        line.replace(" ", "").lower().find("maxabsolutedifference")
                    )
                    if max_abs_diff_index != -1:
                        _ = file.write(line + "\n")
                        continue

                    max_rel_diff_index = (
                        line.replace(" ", "").lower().find("maxrelativedifference")
                    )
                    if max_rel_diff_index != -1:
                        _ = file.write(line + "\n")
                        continue

                    reg_index = re.search(r"differ", line)

                    if reg_index:
                        if isinstance(test_item, np.ndarray) and isinstance(
                            saved_item, np.ndarray
                        ):
                            differences, num_nans = _compare_ndarray(
                                saved_array=saved_item,
                                test_array=test_item,
                                eq_nan=equal_nan,
                                rel_tol=r_tol,
                            )
                        elif isinstance(test_item, pd.DataFrame) and isinstance(
                            saved_item, pd.DataFrame
                        ):
                            differences, num_nans = _compare_df(
                                saved_results=saved_item,
                                test_results=test_item,
                                equal_nan=equal_nan,
                                r_tol=r_tol,
                            )
                        else:
                            raise TypeError(
                                f"Unsupported data type for comparison: {type(test_item)}"
                            )
                        _ = file.write(
                            "Number of NaN elements: " + str(num_nans) + "\n"
                        )
                        _ = file.write("Index:\t\tSaved:\t\tGenerated:\n")

                        # Write test results and saved results differences to file
                        if len(differences) > 0:
                            tab = "\t"
                            for difference in differences:
                                _ = file.write(
                                    f"{tab}[{difference[0]:4}]=> {difference[1]:.8g} != {difference[2]:.8g}\n"
                                )
                _ = file.write(f"{'_' * 40}\n")
    return no_difference_found


def _compare_ndarray(
    saved_array: npt.NDArray[np.float64],
    test_array: npt.NDArray[np.float64],
    eq_nan: bool,
    rel_tol: float,
) -> tuple[list[list[Any]], int]:
    differ_indexes = np.where(saved_array != test_array)[0]
    differences: list[list[Any]] = []
    num_nans = 0

    for index in differ_indexes:
        if eq_nan and (
            np.isnan(test_array[int(index)]) and np.isnan(saved_array[int(index)])
        ):
            num_nans += 1
        elif math.isclose(
            saved_array[int(index)], test_array[int(index)], rel_tol=rel_tol
        ):
            pass
        else:
            differences.append([index, saved_array[index], test_array[index]])
    return differences, num_nans


def _compare_df(
    saved_results: pd.DataFrame,
    test_results: pd.DataFrame,
    equal_nan: bool,
    r_tol: float,
) -> tuple[list[list[Any]], int]:
    return _compare_ndarray(
        saved_array=saved_results.to_numpy().flatten(),
        test_array=test_results.to_numpy().flatten(),
        eq_nan=equal_nan,
        rel_tol=r_tol,
    )


def _validate_input(
    test_obj: tuple[npt.NDArray[np.float64] | pd.DataFrame]
    | npt.NDArray[np.float64]
    | pd.DataFrame,
    saved_obj: tuple[npt.NDArray[np.float64] | pd.DataFrame],
) -> tuple[npt.NDArray[np.float64] | pd.DataFrame]:
    # Check for compatibility of test results and stored data
    if isinstance(test_obj, (np.ndarray, pd.DataFrame)):
        return_test_obj = (test_obj,)
    else:
        return_test_obj = test_obj
    if isinstance(return_test_obj, (tuple, list)):  # pyright: ignore[reportUnnecessaryIsInstance] | Kept for backward compatibility
        if len(saved_obj) != len(return_test_obj):
            raise ValueError(
                f"unable to compare snapshots, different number of saved ({len(saved_obj)}) and generated ({len(return_test_obj)}) results"
            )
    else:
        raise ValueError(
            f"test_obj should be one of list, tuple, numpy array or pandas DataFrame, is {type(test_obj)}"
        )
    return return_test_obj
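compare_snapshots returns True when every generated result matches its stored counterpart within the 1 % relative tolerance (NaNs counting as equal); on a mismatch it warns and writes the differing elements to a log file derived from the snapshot name. A minimal sketch follows, assuming the package is installed; the arrays and variable names are invented for illustration.

# Illustrative sketch only - not part of the released package
import numpy as np

from rock_physics_open.equinor_utilities.snapshot_test_utilities.compare_snapshots import (
    compare_snapshots,
)

generated = (np.array([1.0, 2.0, np.nan]), np.array([0.1005, 0.2]))
stored = (np.array([1.0, 2.0, np.nan]), np.array([0.1000, 0.2]))

# 0.1005 vs 0.1000 is within the 1 % relative tolerance and the NaNs compare
# equal, so no difference is reported and the function returns True
assert compare_snapshots(generated, stored, name_arr=["vp", "vs"])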