pygeoinf 1.3.5__py3-none-any.whl → 1.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pygeoinf/__init__.py +18 -0
- pygeoinf/linear_bayesian.py +53 -111
- pygeoinf/linear_optimisation.py +45 -226
- pygeoinf/linear_solvers.py +430 -0
- pygeoinf/preconditioners.py +140 -0
- pygeoinf/random_matrix.py +8 -5
- pygeoinf/subspaces.py +132 -40
- pygeoinf/symmetric_space/sh_tools.py +19 -7
- pygeoinf/symmetric_space/sphere.py +46 -58
- {pygeoinf-1.3.5.dist-info → pygeoinf-1.3.7.dist-info}/METADATA +1 -1
- {pygeoinf-1.3.5.dist-info → pygeoinf-1.3.7.dist-info}/RECORD +13 -12
- {pygeoinf-1.3.5.dist-info → pygeoinf-1.3.7.dist-info}/WHEEL +0 -0
- {pygeoinf-1.3.5.dist-info → pygeoinf-1.3.7.dist-info}/licenses/LICENSE +0 -0
pygeoinf/__init__.py
CHANGED
@@ -69,6 +69,16 @@ from .linear_solvers import (
     BICGStabMatrixSolver,
     GMRESMatrixSolver,
     CGSolver,
+    MinResSolver,
+    BICGStabSolver,
+    FCGSolver,
+)
+
+from .preconditioners import (
+    JacobiPreconditioningMethod,
+    SpectralPreconditioningMethod,
+    IdentityPreconditioningMethod,
+    IterativePreconditioningMethod,
 )
 
 from .forward_problem import ForwardProblem, LinearForwardProblem

@@ -145,6 +155,14 @@ __all__ = [
     "BICGStabMatrixSolver",
     "GMRESMatrixSolver",
     "CGSolver",
+    "MinResSolver",
+    "BICGStabSolver",
+    "FCGSolver",
+    # preconditioners
+    "IdentityPreconditioningMethod",
+    "JacobiPreconditioningMethod",
+    "SpectralPreconditioningMethod",
+    "IterativePreconditioningMethod",
     # forward_problem
     "ForwardProblem",
     "LinearForwardProblem",
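Net effect of this change: the new iterative solvers and the preconditioning methods become importable from the package root. A minimal sketch using only the names added to `__all__` above (usage and constructor arguments are not shown in this diff):

    from pygeoinf import (
        MinResSolver,
        BICGStabSolver,
        FCGSolver,
        IdentityPreconditioningMethod,
        JacobiPreconditioningMethod,
        SpectralPreconditioningMethod,
        IterativePreconditioningMethod,
    )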
pygeoinf/linear_bayesian.py
CHANGED
@@ -9,10 +9,8 @@ Key Classes
 -----------
 - `LinearBayesianInversion`: Computes the posterior Gaussian measure `p(u|d)`
   for the model `u` given observed data `d`.
-- `LinearBayesianInference`: Extends the framework to compute the posterior
-  distribution for a derived property of the model.
 - `ConstrainedLinearBayesianInversion`: Solves the inverse problem subject to
-
+  an affine constraint `u in A`.
 """
 
 from __future__ import annotations
@@ -41,6 +39,11 @@ class LinearBayesianInversion(LinearInversion):
         model_prior_measure: GaussianMeasure,
         /,
     ) -> None:
+        """
+        Args:
+            forward_problem: The forward problem linking the model to the data.
+            model_prior_measure: The prior Gaussian measure on the model space.
+        """
         super().__init__(forward_problem)
         self._model_prior_measure: GaussianMeasure = model_prior_measure
 
@@ -52,16 +55,7 @@ class LinearBayesianInversion(LinearInversion):
     @property
     def normal_operator(self) -> LinearOperator:
         """
-        Returns the Bayesian
-
-            N = A Q A* + R
-
-        with A the forward operator (with A* its adjoint), Q the model
-        prior covariance, and R the data error covariance. For error-free
-        problems this operator is reduced to:
-
-            N = A Q A*
-
+        Returns the Bayesian Normal operator: N = A Q A* + R.
         """
         forward_operator = self.forward_problem.forward_operator
         model_prior_covariance = self.model_prior_measure.covariance
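For intuition on the condensed docstring: N is the covariance of the predicted data plus the data error. A minimal dense NumPy sketch (stand-in matrices, not the pygeoinf operator API):

    import numpy as np

    # Dense stand-ins: A = forward operator, Q = model prior covariance, R = data error covariance.
    A = np.array([[1.0, 0.0, 2.0], [0.0, 1.0, 1.0]])
    Q = np.diag([1.0, 0.5, 0.25])
    R = 0.1 * np.eye(2)

    N = A @ Q @ A.T + R  # normal operator N = A Q A* + R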
@@ -80,23 +74,10 @@ class LinearBayesianInversion(LinearInversion):
         /,
         *,
         preconditioner: Optional[LinearOperator] = None,
-    ):
+    ) -> LinearOperator:
         """
-        Returns the Kalman gain operator
-
-            K = Q A* Ni
-
-        where Q is the model prior covariance, A the forward operator
-        (with adjoint A*), and Ni is the inverse of the normal operator.
-
-        Args:
-            solver: A linear solver for inverting the normal operator.
-            preconditioner: An optional preconditioner for.
-
-        Returns:
-            A LinearOperator for the Kalman gain.
+        Returns the Kalman gain operator K = Q A* N^-1.
         """
-
         forward_operator = self.forward_problem.forward_operator
         model_prior_covariance = self.model_prior_measure.covariance
         normal_operator = self.normal_operator
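Continuing the dense sketch above, the Kalman gain composes the prior covariance, the forward adjoint, and the inverse normal operator; the library code instead represents N^-1 through the supplied LinearSolver rather than forming a dense inverse:

    K = Q @ A.T @ np.linalg.inv(N)  # K = Q A* N^-1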
@@ -121,37 +102,45 @@ class LinearBayesianInversion(LinearInversion):
         preconditioner: Optional[LinearOperator] = None,
     ) -> GaussianMeasure:
         """
-        Returns the posterior Gaussian measure
+        Returns the posterior Gaussian measure p(u|d).
 
         Args:
             data: The observed data vector.
-            solver: A linear solver for inverting the normal operator
-            preconditioner: An optional preconditioner
+            solver: A linear solver for inverting the normal operator.
+            preconditioner: An optional preconditioner.
         """
         data_space = self.data_space
         model_space = self.model_space
         forward_operator = self.forward_problem.forward_operator
         model_prior_covariance = self.model_prior_measure.covariance
 
+        # 1. Compute Kalman Gain
         kalman_gain = self.kalman_operator(solver, preconditioner=preconditioner)
 
-        #
+        # 2. Compute Posterior Mean
+        # Shift data: d - A(mu_u)
         shifted_data = data_space.subtract(
             data, forward_operator(self.model_prior_measure.expectation)
         )
+
+        # Shift for noise mean: d - A(mu_u) - mu_e
         if self.forward_problem.data_error_measure_set:
-
-
-
+            error_expectation = self.forward_problem.data_error_measure.expectation
+            shifted_data = data_space.subtract(shifted_data, error_expectation)
+        else:
+            error_expectation = data_space.zero
+
         mean_update = kalman_gain(shifted_data)
         expectation = model_space.add(self.model_prior_measure.expectation, mean_update)
 
-        #
+        # 3. Compute Posterior Covariance (Implicitly)
+        # C_post = C_u - K A C_u
         covariance = model_prior_covariance - (
             kalman_gain @ forward_operator @ model_prior_covariance
         )
 
-        #
+        # 4. Set up Posterior Sampling
+        # Logic: Can sample if prior is samplable AND (noise is absent OR samplable)
         can_sample_prior = self.model_prior_measure.sample_set
         can_sample_noise = (
            not self.forward_problem.data_error_measure_set
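The numbered comments added above implement the standard Gaussian update. In the dense notation of the earlier sketch (the prior mean mu, data-error mean mu_e, and data d are assumed values):

    mu = np.zeros(3)            # prior mean (assumed)
    mu_e = np.zeros(2)          # data-error mean (assumed)
    d = np.array([1.0, 2.0])    # observed data (assumed)

    post_mean = mu + K @ (d - A @ mu - mu_e)  # steps 1-2: gain applied to the shifted data
    post_cov = Q - K @ A @ Q                  # step 3: C_post = C_u - K A C_u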
@@ -160,19 +149,21 @@ class LinearBayesianInversion(LinearInversion):
 
         if can_sample_prior and can_sample_noise:
 
-            if self.forward_problem.data_error_measure_set:
-                error_expectation = self.forward_problem.data_error_measure.expectation
-
             def sample():
+                # a. Sample Prior
                 model_sample = self.model_prior_measure.sample()
+
+                # b. Calculate Residual
                 prediction = forward_operator(model_sample)
                 data_residual = data_space.subtract(data, prediction)
 
+                # c. Perturb Residual
                 if self.forward_problem.data_error_measure_set:
                     noise_raw = self.forward_problem.data_error_measure.sample()
                     epsilon = data_space.subtract(noise_raw, error_expectation)
                     data_space.axpy(1.0, epsilon, data_residual)
 
+                # d. Update
                 correction = kalman_gain(data_residual)
                 return model_space.add(model_sample, correction)
 
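The sampling closure added above is the usual perturbed-residual recipe for Gaussian posteriors; roughly, in the same dense notation:

    rng = np.random.default_rng(0)

    u_prior = rng.multivariate_normal(mu, Q)              # a. sample the prior
    residual = d - A @ u_prior                            # b. residual against the data
    residual += rng.multivariate_normal(mu_e, R) - mu_e   # c. perturb with centred noise
    u_post = u_prior + K @ residual                       # d. Kalman update of the sample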
@@ -185,11 +176,13 @@ class LinearBayesianInversion(LinearInversion):
 
 class ConstrainedLinearBayesianInversion(LinearInversion):
     """
-    Solves a linear inverse problem
-    affine subspace constraint `u in A`.
+    Solves a linear inverse problem subject to an affine subspace constraint.
 
-    This
-
+    This class enforces the constraint `u in A` using either:
+    1. Bayesian Conditioning (Default): p(u | d, u in A).
+       If A is defined geometrically (no explicit equation), an implicit
+       operator (I-P) is used, which requires a robust solver in the subspace.
+    2. Geometric Projection: Projects the unconstrained posterior onto A.
     """
 
     def __init__(
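The geometric option described in the new docstring is an orthogonal projection onto the affine set {u : B u = w}, the same formulas spelled out in the code removed from conditioned_prior_measure below. A dense sketch (B and w are assumed):

    B = np.array([[1.0, 1.0, 1.0]])   # constraint operator (assumed)
    w = np.array([1.0])               # constraint value (assumed)

    pinv = B.T @ np.linalg.inv(B @ B.T)   # B* (B B*)^-1
    P = np.eye(3) - pinv @ B              # projector P = I - B* (B B*)^-1 B
    v = pinv @ w                          # translation onto the constraint set
    u_projected = P @ post_mean + v       # B @ u_projected == w (up to round-off)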
@@ -205,8 +198,8 @@ class ConstrainedLinearBayesianInversion(LinearInversion):
         Args:
             forward_problem: The forward problem.
             model_prior_measure: The unconstrained prior Gaussian measure.
-            constraint: The affine subspace A
-            geometric: If True, uses orthogonal projection
+            constraint: The affine subspace A.
+            geometric: If True, uses orthogonal projection (Euclidean metric).
                 If False (default), uses Bayesian conditioning.
         """
         super().__init__(forward_problem)
@@ -214,88 +207,37 @@ class ConstrainedLinearBayesianInversion(LinearInversion):
         self._constraint = constraint
         self._geometric = geometric
 
-
-            raise ValueError(
-                "For Bayesian inversion, the subspace must be defined by a linear "
-                "equation (constraint operator). Use AffineSubspace.from_linear_equation."
-            )
-
-    def conditioned_prior_measure(
-        self,
-        solver: LinearSolver,
-        preconditioner: Optional[LinearOperator] = None,
-    ) -> GaussianMeasure:
+    def conditioned_prior_measure(self) -> GaussianMeasure:
         """
-        Computes the prior measure conditioned on the constraint
-
-        Args:
-            solver: Linear solver used to invert the normal operator, BQB*.
-            preconditioner: Optional preconditioner for the constraint solver.
+        Computes the prior measure conditioned on the constraint.
         """
-
-
-
-
-        if self._geometric:
-            # --- Geometric Approach (Affine Mapping) ---
-            # Map: u -> P u + v
-            # P = I - B* (B B*)^-1 B
-            # v = B* (B B*)^-1 w
-
-            gram_operator = constraint_op @ constraint_op.adjoint
-
-            if isinstance(solver, IterativeLinearSolver):
-                inv_gram_operator = solver(gram_operator, preconditioner=preconditioner)
-            else:
-                inv_gram_operator = solver(gram_operator)
-
-            pseudo_inverse = constraint_op.adjoint @ inv_gram_operator
-            identity = self._unconstrained_prior.domain.identity_operator()
-            projector = identity - pseudo_inverse @ constraint_op
-            translation = pseudo_inverse(constraint_val)
-
-            return self._unconstrained_prior.affine_mapping(
-                operator=projector, translation=translation
-            )
-
-        else:
-            # --- Bayesian Approach (Statistical Conditioning) ---
-            # Treat the constraint as a noiseless observation: w = B(u)
-
-            constraint_problem = LinearForwardProblem(constraint_op)
-            constraint_inversion = LinearBayesianInversion(
-                constraint_problem, self._unconstrained_prior
-            )
-
-            return constraint_inversion.model_posterior_measure(
-                constraint_val, solver, preconditioner=preconditioner
-            )
+        return self._constraint.condition_gaussian_measure(
+            self._unconstrained_prior, geometric=self._geometric
+        )
 
     def model_posterior_measure(
         self,
         data: Vector,
         solver: LinearSolver,
-
+        /,
         *,
         preconditioner: Optional[LinearOperator] = None,
-        constraint_preconditioner: Optional[LinearOperator] = None,
     ) -> GaussianMeasure:
         """
-        Returns the posterior Gaussian measure
+        Returns the posterior Gaussian measure p(u | d, u in A).
 
         Args:
             data: Observed data vector.
             solver: Solver for the data update (inverts A C_cond A* + Ce).
-
-
-
+            preconditioner: Preconditioner for the data update.
+
+        Note: The solver for the constraint update is managed internally by
+            the AffineSubspace object passed at initialization.
         """
-        # 1. Condition Prior
-        cond_prior = self.conditioned_prior_measure(
-            constraint_solver, preconditioner=constraint_preconditioner
-        )
+        # 1. Condition Prior
+        cond_prior = self.conditioned_prior_measure()
 
-        # 2. Solve Bayesian Inverse Problem
+        # 2. Solve Bayesian Inverse Problem with the new prior
        bayes_inv = LinearBayesianInversion(self.forward_problem, cond_prior)
 
        return bayes_inv.model_posterior_measure(