pygeoinf 1.3.3__py3-none-any.whl → 1.3.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pygeoinf/__init__.py CHANGED
@@ -76,9 +76,14 @@ from .forward_problem import ForwardProblem, LinearForwardProblem
76
76
  from .linear_optimisation import (
77
77
  LinearLeastSquaresInversion,
78
78
  LinearMinimumNormInversion,
79
+ ConstrainedLinearLeastSquaresInversion,
80
+ ConstrainedLinearMinimumNormInversion,
79
81
  )
80
82
 
81
- from .linear_bayesian import LinearBayesianInversion, LinearBayesianInference
83
+ from .linear_bayesian import (
84
+ LinearBayesianInversion,
85
+ ConstrainedLinearBayesianInversion,
86
+ )
82
87
 
83
88
  from .backus_gilbert import HyperEllipsoid
84
89
 
@@ -87,6 +92,8 @@ from .nonlinear_optimisation import (
87
92
  )
88
93
 
89
94
 
95
+ from .subspaces import OrthogonalProjector, AffineSubspace, LinearSubspace
96
+
90
97
  __all__ = [
91
98
  # random_matrix
92
99
  "fixed_rank_random_range",
@@ -144,11 +151,17 @@ __all__ = [
144
151
  # linear_optimisation
145
152
  "LinearLeastSquaresInversion",
146
153
  "LinearMinimumNormInversion",
154
+ "ConstrainedLinearLeastSquaresInversion",
155
+ "ConstrainedLinearMinimumNormInversion",
147
156
  # linear_bayesian
148
157
  "LinearBayesianInversion",
149
- "LinearBayesianInference",
158
+ "ConstrainedLinearBayesianInversion",
150
159
  # backus_gilbert
151
160
  "HyperEllipsoid",
152
161
  # nonlinear_optimisation
153
162
  "ScipyUnconstrainedOptimiser",
163
+ # Subspaces
164
+ "OrthogonalProjector",
165
+ "AffineSubspace",
166
+ "LinearSubspace",
154
167
  ]
File without changes
@@ -198,4 +198,4 @@ class HilbertSpaceAxiomChecks:
198
198
  self._check_inplace_operations(x, y, a)
199
199
  self._check_copy(x)
200
200
 
201
- print(f" All {n_checks} Hilbert space axiom checks passed successfully.")
201
+ print(f"[✓] All {n_checks} Hilbert space axiom checks passed successfully.")
@@ -194,4 +194,4 @@ class LinearOperatorAxiomChecks(NonLinearOperatorAxiomChecks):
194
194
  self, op2, x1, y, a, check_rtol=check_rtol, check_atol=check_atol
195
195
  )
196
196
 
197
- print(f" All {n_checks} linear operator checks passed successfully.")
197
+ print(f"[✓] All {n_checks} linear operator checks passed successfully.")
@@ -195,4 +195,4 @@ class NonLinearOperatorAxiomChecks:
195
195
  self, op2, x, v, check_rtol=check_rtol, check_atol=check_atol
196
196
  )
197
197
 
198
- print(f" All {n_checks} non-linear operator checks passed successfully.")
198
+ print(f"[✓] All {n_checks} non-linear operator checks passed successfully.")
pygeoinf/hilbert_space.py CHANGED
@@ -28,6 +28,7 @@ from abc import ABC, abstractmethod
28
28
  from typing import (
29
29
  TypeVar,
30
30
  List,
31
+ Union,
31
32
  Optional,
32
33
  Any,
33
34
  TYPE_CHECKING,
@@ -607,6 +608,50 @@ class EuclideanSpace(HilbertSpace):
607
608
  """
608
609
  return isinstance(x, np.ndarray) and x.shape == (self.dim,)
609
610
 
611
+ def subspace_projection(self, indices: Union[int, List[int]]) -> "LinearOperator":
612
+ """
613
+ Returns a projection operator onto specified coordinates.
614
+
615
+ This creates a linear operator that extracts the components at the given
616
+ indices, projecting from this space to a lower-dimensional Euclidean space.
617
+
618
+ Args:
619
+ indices: Single index or list of indices to project onto (0-indexed).
620
+
621
+ Returns:
622
+ LinearOperator from this space to EuclideanSpace(len(indices)).
623
+
624
+ Raises:
625
+ IndexError: If any index is out of range for this space's dimension.
626
+ """
627
+ from .linear_operators import LinearOperator
628
+
629
+ if isinstance(indices, int):
630
+ indices = [indices]
631
+
632
+ indices_array = np.array(indices)
633
+ if np.any(indices_array < 0) or np.any(indices_array >= self.dim):
634
+ raise IndexError(
635
+ f"Indices {indices_array} out of range for dimension {self.dim}"
636
+ )
637
+
638
+ target_space = EuclideanSpace(len(indices))
639
+
640
+ def forward(x: np.ndarray) -> np.ndarray:
641
+ return x[indices_array]
642
+
643
+ def adjoint_mapping(y: np.ndarray) -> np.ndarray:
644
+ result = np.zeros(self.dim)
645
+ result[indices_array] = y
646
+ return result
647
+
648
+ return LinearOperator(
649
+ self,
650
+ target_space,
651
+ forward,
652
+ adjoint_mapping=adjoint_mapping,
653
+ )
654
+
610
655
 
611
656
  class MassWeightedHilbertSpace(HilbertSpace):
612
657
  """
@@ -5,19 +5,14 @@ This module treats the inverse problem from a statistical perspective, aiming to
5
5
  determine the full posterior probability distribution of the unknown model
6
6
  parameters, rather than a single best-fit solution.
7
7
 
8
- It assumes that the prior knowledge about the model and the statistics of the
9
- data errors can be described by Gaussian measures. For a linear forward problem,
10
- the resulting posterior distribution for the model is also Gaussian, allowing
11
- for an analytical solution.
12
-
13
8
  Key Classes
14
9
  -----------
15
10
  - `LinearBayesianInversion`: Computes the posterior Gaussian measure `p(u|d)`
16
- for the model `u` given observed data `d`. This provides not only a mean
17
- estimate for the model but also its uncertainty (covariance).
11
+ for the model `u` given observed data `d`.
18
12
  - `LinearBayesianInference`: Extends the framework to compute the posterior
19
- distribution for a derived property of the model, `p(B(u)|d)`, where `B` is
20
- some linear operator.
13
+ distribution for a derived property of the model.
14
+ - `ConstrainedLinearBayesianInversion`: Solves the inverse problem subject to
15
+ a hard affine constraint `u in A`, interpreting it as conditioning the prior.
21
16
  """
22
17
 
23
18
  from __future__ import annotations
@@ -25,22 +20,19 @@ from typing import Optional
25
20
 
26
21
  from .inversion import LinearInversion
27
22
  from .gaussian_measure import GaussianMeasure
28
-
29
-
30
23
  from .forward_problem import LinearForwardProblem
31
- from .linear_operators import LinearOperator
24
+ from .linear_operators import LinearOperator, NormalSumOperator
32
25
  from .linear_solvers import LinearSolver, IterativeLinearSolver
33
- from .hilbert_space import HilbertSpace, Vector
26
+ from .hilbert_space import Vector
27
+ from .subspaces import AffineSubspace
34
28
 
35
29
 
36
30
  class LinearBayesianInversion(LinearInversion):
37
31
  """
38
32
  Solves a linear inverse problem using Bayesian methods.
39
33
 
40
- This class applies to problems of the form `d = A(u) + e`, where the prior
41
- knowledge of the model `u` and the statistics of the error `e` are described
42
- by Gaussian distributions. It computes the full posterior probability
43
- distribution `p(u|d)` for the model parameters given an observation `d`.
34
+ This class applies to problems of the form `d = A(u) + e`. It computes the
35
+ full posterior probability distribution `p(u|d)`.
44
36
  """
45
37
 
46
38
  def __init__(
@@ -49,11 +41,6 @@ class LinearBayesianInversion(LinearInversion):
49
41
  model_prior_measure: GaussianMeasure,
50
42
  /,
51
43
  ) -> None:
52
- """
53
- Args:
54
- forward_problem: The forward problem linking the model to the data.
55
- model_prior_measure: The prior Gaussian measure on the model space.
56
- """
57
44
  super().__init__(forward_problem)
58
45
  self._model_prior_measure: GaussianMeasure = model_prior_measure
59
46
 
@@ -65,45 +52,65 @@ class LinearBayesianInversion(LinearInversion):
65
52
  @property
66
53
  def normal_operator(self) -> LinearOperator:
67
54
  """
68
- Returns the covariance of the prior predictive distribution, `p(d)`.
55
+ Returns the Bayesian normal operator:
56
+
57
+ N = A Q A* + R
58
+
59
+ with A the forward operator (with A* its adjoint), Q the model
60
+ prior covariance, and R the data error covariance. For error-free
61
+ problems this operator is reduced to:
62
+
63
+ N = A Q A*
69
64
 
70
- This operator, `C_d = A @ C_u @ A* + C_e`, represents the total
71
- expected covariance in the data space before any data is observed.
72
- Its inverse is central to calculating the posterior distribution and is
73
- often referred to as the Bayesian normal operator.
74
65
  """
75
66
  forward_operator = self.forward_problem.forward_operator
76
- prior_model_covariance = self.model_prior_measure.covariance
67
+ model_prior_covariance = self.model_prior_measure.covariance
77
68
 
78
69
  if self.forward_problem.data_error_measure_set:
79
70
  return (
80
- forward_operator @ prior_model_covariance @ forward_operator.adjoint
71
+ forward_operator @ model_prior_covariance @ forward_operator.adjoint
81
72
  + self.forward_problem.data_error_measure.covariance
82
73
  )
83
74
  else:
84
- return forward_operator @ prior_model_covariance @ forward_operator.adjoint
75
+ return NormalSumOperator(forward_operator, model_prior_covariance)
85
76
 
86
- def data_prior_measure(self) -> GaussianMeasure:
77
+ def kalman_operator(
78
+ self,
79
+ solver: LinearSolver,
80
+ /,
81
+ *,
82
+ preconditioner: Optional[LinearOperator] = None,
83
+ ):
87
84
  """
88
- Returns the prior predictive distribution on the data, `p(d)`.
85
+ Returns the Kalman gain operator for the problem:
89
86
 
90
- This measure describes the expected distribution of the data before any
91
- specific observation is made, combining the uncertainty from the prior
92
- model and the data errors.
87
+ K = Q A* Ni
88
+
89
+ where Q is the model prior covariance, A the forward operator
90
+ (with adjoint A*), and Ni is the inverse of the normal operator.
91
+
92
+ Args:
93
+ solver: A linear solver for inverting the normal operator.
94
+ preconditioner: An optional preconditioner for the solver.
95
+
96
+ Returns:
97
+ A LinearOperator for the Kalman gain.
93
98
  """
94
- if self.forward_problem.data_error_measure_set:
95
- # d = A(u) + e => p(d) is convolution of p(A(u)) and p(e)
96
- return (
97
- self.model_prior_measure.affine_mapping(
98
- operator=self.forward_problem.forward_operator
99
- )
100
- + self.forward_problem.data_error_measure
99
+
100
+ forward_operator = self.forward_problem.forward_operator
101
+ model_prior_covariance = self.model_prior_measure.covariance
102
+ normal_operator = self.normal_operator
103
+
104
+ if isinstance(solver, IterativeLinearSolver):
105
+ inverse_normal_operator = solver(
106
+ normal_operator, preconditioner=preconditioner
101
107
  )
102
108
  else:
103
- # d = A(u) => p(d) is just the mapping of the model prior
104
- return self.model_prior_measure.affine_mapping(
105
- operator=self.forward_problem.forward_operator
106
- )
109
+ inverse_normal_operator = solver(normal_operator)
110
+
111
+ return (
112
+ model_prior_covariance @ forward_operator.adjoint @ inverse_normal_operator
113
+ )
107
114
 
108
115
  def model_posterior_measure(
109
116
  self,
@@ -114,34 +121,21 @@ class LinearBayesianInversion(LinearInversion):
114
121
  preconditioner: Optional[LinearOperator] = None,
115
122
  ) -> GaussianMeasure:
116
123
  """
117
- Returns the posterior Gaussian measure for the model, `p(u|d)`.
118
-
119
- This measure represents our updated state of knowledge about the model
120
- `u` after observing the data `d`. Its expectation is the most likely
121
- model, and its covariance quantifies the remaining uncertainty.
124
+ Returns the posterior Gaussian measure for the model conditioned on the data.
122
125
 
123
126
  Args:
124
127
  data: The observed data vector.
125
- solver: A linear solver for inverting the normal operator.
126
- preconditioner: An optional preconditioner for iterative solvers.
127
-
128
- Returns:
129
- The posterior `GaussianMeasure` on the model space.
128
+ solver: A linear solver for inverting the normal operator C_d.
129
+ preconditioner: An optional preconditioner for C_d.
130
130
  """
131
131
  data_space = self.data_space
132
132
  model_space = self.model_space
133
133
  forward_operator = self.forward_problem.forward_operator
134
- prior_model_covariance = self.model_prior_measure.covariance
135
- normal_operator = self.normal_operator
134
+ model_prior_covariance = self.model_prior_measure.covariance
136
135
 
137
- if isinstance(solver, IterativeLinearSolver):
138
- inverse_normal_operator = solver(
139
- normal_operator, preconditioner=preconditioner
140
- )
141
- else:
142
- inverse_normal_operator = solver(normal_operator)
136
+ kalman_gain = self.kalman_operator(solver, preconditioner=preconditioner)
143
137
 
144
- # Calculate posterior mean: mu_post = mu_u + C_u*A^T*C_d^-1*(d - A*mu_u - mu_e)
138
+ # u_bar_post = u_bar + K (v - A u_bar - v_bar)
145
139
  shifted_data = data_space.subtract(
146
140
  data, forward_operator(self.model_prior_measure.expectation)
147
141
  )
@@ -149,97 +143,161 @@ class LinearBayesianInversion(LinearInversion):
149
143
  shifted_data = data_space.subtract(
150
144
  shifted_data, self.forward_problem.data_error_measure.expectation
151
145
  )
152
-
153
- mean_update = (
154
- prior_model_covariance @ forward_operator.adjoint @ inverse_normal_operator
155
- )(shifted_data)
146
+ mean_update = kalman_gain(shifted_data)
156
147
  expectation = model_space.add(self.model_prior_measure.expectation, mean_update)
157
148
 
158
- # Calculate posterior covariance: C_post = C_u - C_u*A^T*C_d^-1*A*C_u
159
- covariance = prior_model_covariance - (
160
- prior_model_covariance
161
- @ forward_operator.adjoint
162
- @ inverse_normal_operator
163
- @ forward_operator
164
- @ prior_model_covariance
149
+ # Q_post = Q - K A Q
150
+ covariance = model_prior_covariance - (
151
+ kalman_gain @ forward_operator @ model_prior_covariance
152
+ )
153
+
154
+ # Add in a sampling method if that is possible.
155
+ can_sample_prior = self.model_prior_measure.sample_set
156
+ can_sample_noise = (
157
+ not self.forward_problem.data_error_measure_set
158
+ or self.forward_problem.data_error_measure.sample_set
165
159
  )
166
160
 
167
- return GaussianMeasure(covariance=covariance, expectation=expectation)
161
+ if can_sample_prior and can_sample_noise:
162
+
163
+ if self.forward_problem.data_error_measure_set:
164
+ error_expectation = self.forward_problem.data_error_measure.expectation
165
+
166
+ def sample():
167
+ model_sample = self.model_prior_measure.sample()
168
+ prediction = forward_operator(model_sample)
169
+ data_residual = data_space.subtract(data, prediction)
170
+
171
+ if self.forward_problem.data_error_measure_set:
172
+ noise_raw = self.forward_problem.data_error_measure.sample()
173
+ epsilon = data_space.subtract(noise_raw, error_expectation)
174
+ data_space.axpy(1.0, epsilon, data_residual)
168
175
 
176
+ correction = kalman_gain(data_residual)
177
+ return model_space.add(model_sample, correction)
169
178
 
170
- class LinearBayesianInference(LinearBayesianInversion):
179
+ return GaussianMeasure(
180
+ covariance=covariance, expectation=expectation, sample=sample
181
+ )
182
+ else:
183
+ return GaussianMeasure(covariance=covariance, expectation=expectation)
184
+
185
+
186
+ class ConstrainedLinearBayesianInversion(LinearInversion):
171
187
  """
172
- Performs Bayesian inference on a derived property of the model.
188
+ Solves a linear inverse problem using Bayesian methods subject to an
189
+ affine subspace constraint `u in A`.
173
190
 
174
- While `LinearBayesianInversion` solves for the model `u` itself, this class
175
- computes the posterior distribution for a property `p = B(u)`, where `B` is a
176
- linear operator acting on the model `u`. This is useful for uncertainty
177
- quantification of derived quantities (e.g., the average value of a field).
191
+ This interprets the constraint as conditioning the prior on the subspace.
192
+ The subspace must be defined by a linear equation B(u) = w.
178
193
  """
179
194
 
180
195
  def __init__(
181
196
  self,
182
197
  forward_problem: LinearForwardProblem,
183
198
  model_prior_measure: GaussianMeasure,
184
- property_operator: LinearOperator,
199
+ constraint: AffineSubspace,
185
200
  /,
201
+ *,
202
+ geometric: bool = False,
186
203
  ) -> None:
187
204
  """
188
205
  Args:
189
- forward_problem: The forward problem linking the model to the data.
190
- model_prior_measure: The prior Gaussian measure on the model space.
191
- property_operator: The linear operator `B` that maps a model `u` to
192
- a property `p`.
206
+ forward_problem: The forward problem.
207
+ model_prior_measure: The unconstrained prior Gaussian measure.
208
+ constraint: The affine subspace A = {u | Bu = w}.
209
+ geometric: If True, uses orthogonal projection to enforce the constraint.
210
+ If False (default), uses Bayesian conditioning.
193
211
  """
194
- super().__init__(forward_problem, model_prior_measure)
195
- if property_operator.domain != self.forward_problem.model_space:
196
- raise ValueError("Property operator domain must match the model space.")
197
- self._property_operator: LinearOperator = property_operator
198
-
199
- @property
200
- def property_space(self) -> HilbertSpace:
201
- """The Hilbert space in which the property `p` resides."""
202
- return self._property_operator.codomain
203
-
204
- @property
205
- def property_operator(self) -> LinearOperator:
206
- """The linear operator `B` that defines the property."""
207
- return self._property_operator
212
+ super().__init__(forward_problem)
213
+ self._unconstrained_prior = model_prior_measure
214
+ self._constraint = constraint
215
+ self._geometric = geometric
216
+
217
+ if not constraint.has_constraint_equation:
218
+ raise ValueError(
219
+ "For Bayesian inversion, the subspace must be defined by a linear "
220
+ "equation (constraint operator). Use AffineSubspace.from_linear_equation."
221
+ )
208
222
 
209
- def property_prior_measure(self) -> GaussianMeasure:
223
+ def conditioned_prior_measure(
224
+ self,
225
+ solver: LinearSolver,
226
+ preconditioner: Optional[LinearOperator] = None,
227
+ ) -> GaussianMeasure:
210
228
  """
211
- Returns the prior measure on the property space, `p(p)`.
229
+ Computes the prior measure conditioned on the constraint B(u) = w.
212
230
 
213
- This is computed by propagating the model prior through the property
214
- operator.
231
+ Args:
232
+ solver: Linear solver used to invert the normal operator, BQB*.
233
+ preconditioner: Optional preconditioner for the constraint solver.
215
234
  """
216
- return self.model_prior_measure.affine_mapping(operator=self.property_operator)
217
235
 
218
- def property_posterior_measure(
236
+ constraint_op = self._constraint.constraint_operator
237
+ constraint_val = self._constraint.constraint_value
238
+
239
+ if self._geometric:
240
+ # --- Geometric Approach (Affine Mapping) ---
241
+ # Map: u -> P u + v
242
+ # P = I - B* (B B*)^-1 B
243
+ # v = B* (B B*)^-1 w
244
+
245
+ gram_operator = constraint_op @ constraint_op.adjoint
246
+
247
+ if isinstance(solver, IterativeLinearSolver):
248
+ inv_gram_operator = solver(gram_operator, preconditioner=preconditioner)
249
+ else:
250
+ inv_gram_operator = solver(gram_operator)
251
+
252
+ pseudo_inverse = constraint_op.adjoint @ inv_gram_operator
253
+ identity = self._unconstrained_prior.domain.identity_operator()
254
+ projector = identity - pseudo_inverse @ constraint_op
255
+ translation = pseudo_inverse(constraint_val)
256
+
257
+ return self._unconstrained_prior.affine_mapping(
258
+ operator=projector, translation=translation
259
+ )
260
+
261
+ else:
262
+ # --- Bayesian Approach (Statistical Conditioning) ---
263
+ # Treat the constraint as a noiseless observation: w = B(u)
264
+
265
+ constraint_problem = LinearForwardProblem(constraint_op)
266
+ constraint_inversion = LinearBayesianInversion(
267
+ constraint_problem, self._unconstrained_prior
268
+ )
269
+
270
+ return constraint_inversion.model_posterior_measure(
271
+ constraint_val, solver, preconditioner=preconditioner
272
+ )
273
+
274
+ def model_posterior_measure(
219
275
  self,
220
276
  data: Vector,
221
277
  solver: LinearSolver,
222
- /,
278
+ constraint_solver: LinearSolver,
223
279
  *,
224
280
  preconditioner: Optional[LinearOperator] = None,
281
+ constraint_preconditioner: Optional[LinearOperator] = None,
225
282
  ) -> GaussianMeasure:
226
283
  """
227
- Returns the posterior measure on the property space, `p(p|d)`.
228
-
229
- This is computed by first finding the posterior measure for the model,
230
- `p(u|d)`, and then propagating it through the property operator `B`.
284
+ Returns the posterior Gaussian measure for the model given the constraint and the data.
231
285
 
232
286
  Args:
233
- data: The observed data vector.
234
- solver: A linear solver for the normal equations.
235
- preconditioner: An optional preconditioner for iterative solvers.
236
-
237
- Returns:
238
- The posterior `GaussianMeasure` on the property space.
287
+ data: Observed data vector.
288
+ solver: Solver for the data update (inverts A C_cond A* + Ce).
289
+ constraint_solver: Solver for the prior conditioning (inverts B C_prior B*).
290
+ preconditioner: Preconditioner for the data update (acts on Data Space).
291
+ constraint_preconditioner: Preconditioner for the constraint update (acts on Property Space).
239
292
  """
240
- # First, find the posterior distribution for the model u.
241
- model_posterior = self.model_posterior_measure(
293
+ # 1. Condition Prior (Uses constraint_solver and constraint_preconditioner)
294
+ cond_prior = self.conditioned_prior_measure(
295
+ constraint_solver, preconditioner=constraint_preconditioner
296
+ )
297
+
298
+ # 2. Solve Bayesian Inverse Problem (Uses solver and preconditioner)
299
+ bayes_inv = LinearBayesianInversion(self.forward_problem, cond_prior)
300
+
301
+ return bayes_inv.model_posterior_measure(
242
302
  data, solver, preconditioner=preconditioner
243
303
  )
244
- # Then, map that distribution to the property space.
245
- return model_posterior.affine_mapping(operator=self.property_operator)