CUQIpy 1.3.0.post0.dev395__py3-none-any.whl → 1.4.0.post0.dev13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of CUQIpy might be problematic.

cuqi/_version.py CHANGED
@@ -8,11 +8,11 @@ import json

 version_json = '''
 {
- "date": "2025-09-19T16:37:46+0300",
+ "date": "2025-10-09T01:04:51+0300",
  "dirty": false,
  "error": null,
- "full-revisionid": "2cf72ec9af9af17dad4bb3870ee20d303376de24",
- "version": "1.3.0.post0.dev395"
+ "full-revisionid": "3b38e05b811faaaa6eb273ee4a1d03438734ddd6",
+ "version": "1.4.0.post0.dev13"
 }
 ''' # END VERSION_JSON

cuqi/density/_density.py CHANGED
@@ -143,7 +143,15 @@ class Density(ABC):
     def enable_FD(self, epsilon=1e-8):
         """ Enable finite difference approximation for logd gradient. Note
         that if enabled, the FD approximation will be used even if the
-        _gradient method is implemented. """
+        _gradient method is implemented.
+
+        Parameters
+        ----------
+        epsilon : float
+
+            Spacing (step size) to use for finite difference approximation for logd
+            gradient for each variable. Default is 1e-8.
+        """
         self._FD_enabled = True
         self._FD_epsilon = epsilon

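The new docstring above documents an existing switch on Density. As a quick orientation, here is a minimal sketch of how that switch is typically used on a single distribution; the Gaussian setup is illustrative, and the gradient/disable_FD calls assume the Density-level API of the current release rather than anything introduced in this diff.

    import numpy as np
    import cuqi

    # Illustrative stand-alone distribution (not taken from this diff).
    x = cuqi.distribution.Gaussian(np.zeros(5), 2.0)

    # Force the finite-difference approximation of the logd gradient with a
    # custom step size; per the docstring it takes precedence over _gradient.
    x.enable_FD(epsilon=1e-6)
    grad = x.gradient(np.ones(5))

    # Switch back to the analytic gradient.
    x.disable_FD()
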
cuqi/distribution/_joint_distribution.py CHANGED
@@ -84,6 +84,8 @@ class JointDistribution:
         cond_vars = self._get_conditioning_variables()
         if len(cond_vars) > 0:
             raise ValueError(f"Every density parameter must have a distribution (prior). Missing prior for {cond_vars}.")
+        # Initialize finite difference gradient approximation settings
+        self.disable_FD()

     # --------- Public properties ---------
     @property
@@ -96,6 +98,38 @@ class JointDistribution:
         """ Returns the geometries of the joint distribution. """
         return [dist.geometry for dist in self._distributions]

+    @property
+    def FD_enabled(self):
+        """ Returns a dictionary of keys and booleans indicating for each
+        parameter name (key) if finite difference approximation of the logd
+        gradient is enabled. """
+        par_names = self.get_parameter_names()
+        FD_enabled = {
+            par_name: self.FD_epsilon[par_name] is not None for par_name in par_names
+        }
+        return FD_enabled
+
+    @property
+    def FD_epsilon(self):
+        """ Returns a dictionary indicating for each parameter name the
+        spacing for the finite difference approximation of the logd gradient."""
+        return self._FD_epsilon
+
+    @FD_epsilon.setter
+    def FD_epsilon(self, value):
+        """ Set the spacing for the finite difference approximation of the
+        logd gradient as a dictionary. The keys are the parameter names.
+        The value for each key is either None (no FD approximation) or a float
+        representing the FD step size.
+        """
+        par_names = self.get_parameter_names()
+        if value is None:
+            self._FD_epsilon = {par_name: None for par_name in par_names}
+        else:
+            if set(value.keys()) != set(par_names):
+                raise ValueError("Keys of FD_epsilon must match the parameter names of the distribution "+f" {par_names}")
+            self._FD_epsilon = value
+
     # --------- Public methods ---------
     def logd(self, *args, **kwargs):
         """ Evaluate the un-normalized log density function. """
@@ -136,6 +170,33 @@ class JointDistribution:
         # Can reduce to Posterior, Likelihood or Distribution.
         return new_joint._reduce_to_single_density()

+    def enable_FD(self, epsilon=None):
+        """ Enable finite difference approximation for logd gradient. Note
+        that if enabled, the FD approximation will be used even if the
+        _gradient method is implemented. By default, all parameters
+        will have FD enabled with a step size of 1e-8.
+
+        Parameters
+        ----------
+        epsilon : dict, *optional*
+
+            Dictionary indicating the spacing (step size) to use for finite
+            difference approximation for logd gradient for each variable.
+
+            Keys are variable names.
+            Values are either a float to enable FD with the given value as the FD
+            step size, or None to disable FD for that variable. Default is 1e-8 for
+            all variables.
+        """
+        if epsilon is None:
+            epsilon = {par_name: 1e-8 for par_name in self.get_parameter_names()}
+        self.FD_epsilon = epsilon
+
+    def disable_FD(self):
+        """ Disable finite difference approximation for logd gradient. """
+        par_names = self.get_parameter_names()
+        self.FD_epsilon = {par_name: None for par_name in par_names}
+
     def get_parameter_names(self) -> List[str]:
         """ Returns the parameter names of the joint distribution. """
         return [dist.name for dist in self._distributions]
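Taken together, the FD_enabled/FD_epsilon properties and the enable_FD/disable_FD methods added above give per-parameter control of the finite-difference gradient on a joint distribution. A brief sketch follows; the two-parameter hierarchy and the names s and x are assumptions made for illustration, and only the FD calls come from this diff.

    import numpy as np
    import cuqi

    # Hypothetical hierarchy: s controls the (co)variance of x.
    s = cuqi.distribution.Gamma(1, 1e-2, name="s")
    x = cuqi.distribution.Gaussian(np.zeros(4), cov=lambda s: 1/s, name="x")
    joint = cuqi.distribution.JointDistribution(s, x)

    joint.enable_FD()                        # default step size 1e-8 for every parameter
    print(joint.FD_enabled)                  # {'s': True, 'x': True}

    joint.enable_FD({"s": 1e-6, "x": None})  # FD only for s, with a custom step size
    print(joint.FD_epsilon)                  # {'s': 1e-06, 'x': None}

    joint.disable_FD()                       # every entry back to None

Note that a dictionary passed to enable_FD (or assigned to FD_epsilon) must have one key per parameter name; otherwise the setter raises a ValueError.
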
@@ -202,34 +263,58 @@ class JointDistribution:
         # Count number of distributions and likelihoods
         n_dist = len(self._distributions)
         n_likelihood = len(self._likelihoods)
+        reduced_FD_epsilon = {par_name:self.FD_epsilon[par_name] for par_name in self.get_parameter_names()}
+        self.enable_FD(epsilon=reduced_FD_epsilon)

         # Cant reduce if there are multiple distributions or likelihoods
         if n_dist > 1:
             return self

+        # If only evaluated densities left return joint to ensure logd method is available
+        if n_dist == 0 and n_likelihood == 0:
+            return self
+
+        # Extract the parameter name of the distribution
+        if n_dist == 1:
+            par_name = self._distributions[0].name
+        elif n_likelihood == 1:
+            par_name = self._likelihoods[0].name
+        else:
+            par_name = None
+
         # If exactly one distribution and multiple likelihoods reduce
         if n_dist == 1 and n_likelihood > 1:
-            return MultipleLikelihoodPosterior(*self._densities)
-
+            reduced_distribution = MultipleLikelihoodPosterior(*self._densities)
+            reduced_FD_epsilon = {par_name:self.FD_epsilon[par_name]}
+
         # If exactly one distribution and one likelihood its a Posterior
         if n_dist == 1 and n_likelihood == 1:
             # Ensure parameter names match, otherwise return the joint distribution
             if set(self._likelihoods[0].get_parameter_names()) != set(self._distributions[0].get_parameter_names()):
                 return self
-            return self._add_constants_to_density(Posterior(self._likelihoods[0], self._distributions[0]))
+            reduced_distribution = Posterior(self._likelihoods[0], self._distributions[0])
+            reduced_distribution = self._add_constants_to_density(reduced_distribution)
+            reduced_FD_epsilon = self.FD_epsilon[par_name]

         # If exactly one distribution and no likelihoods its a Distribution
         if n_dist == 1 and n_likelihood == 0:
-            return self._add_constants_to_density(self._distributions[0])
-
+            # Intentionally skip enabling FD here. If the user wants FD, they
+            # can enable it for this particular distribution before forming
+            # the joint distribution.
+            return self._add_constants_to_density(self._distributions[0])
+
         # If no distributions and exactly one likelihood its a Likelihood
         if n_likelihood == 1 and n_dist == 0:
-            return self._likelihoods[0]
+            # This case seems to not happen in practice, but we include it for
+            # completeness.
+            reduced_distribution = self._likelihoods[0]
+            reduced_FD_epsilon = self.FD_epsilon[par_name]
+
+        if self.FD_enabled[par_name]:
+            reduced_distribution.enable_FD(epsilon=reduced_FD_epsilon)
+
+        return reduced_distribution

-        # If only evaluated densities left return joint to ensure logd method is available
-        if n_dist == 0 and n_likelihood == 0:
-            return self
-
     def _add_constants_to_density(self, density: Density):
         """ Add the constants (evaluated densities) to a single density. Used when reducing to single density. """

@@ -274,7 +359,7 @@ class JointDistribution:
         if len(cond_vars) > 0:
             msg += f"|{cond_vars}"
         msg += ")"
-
+
         msg += "\n"
         msg += " Densities: \n"

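The changes to _reduce_to_single_density above carry the joint distribution's FD settings over to the single density it reduces to, for example a Posterior. The sketch below shows the intended round trip; the Deconvolution1D setup is illustrative, and checking FD_enabled on the resulting posterior assumes the Density-level property of the current release and that conditioning preserves the joint's FD settings, which is what the propagation code suggests.

    import numpy as np
    import cuqi

    # Illustrative Bayesian problem; only the enable_FD call relates to this diff.
    A, y_data, _ = cuqi.testproblem.Deconvolution1D().get_components()
    x = cuqi.distribution.Gaussian(np.zeros(A.domain_dim), 0.1)
    y = cuqi.distribution.Gaussian(A @ x, 0.01)

    joint = cuqi.distribution.JointDistribution(x, y)
    joint.enable_FD({"x": 1e-6, "y": None})

    posterior = joint(y=y_data)   # conditioning reduces the joint to a Posterior
    print(posterior.FD_enabled)   # expected True: the setting for x carries over
    grad = posterior.gradient(np.zeros(A.domain_dim))  # logd gradient via FD

As the in-line comment in the diff notes, the case of a single distribution with no likelihoods intentionally skips this propagation; FD should then be enabled on that distribution directly.
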
cuqi/experimental/mcmc/_sampler.py CHANGED
@@ -203,13 +203,16 @@ class Sampler(ABC):

         self.set_state(state)

-    def sample(self, Ns, batch_size=0, sample_path='./CUQI_samples/') -> 'Sampler':
+    def sample(self, Ns, Nt=1, batch_size=0, sample_path='./CUQI_samples/') -> 'Sampler':
         """ Sample Ns samples from the target density.

         Parameters
         ----------
         Ns : int
             The number of samples to draw.
+
+        Nt : int, optional, default=1
+            The thinning interval. If Nt >= 1, every Nt'th sample is stored. The larger Nt, the fewer samples are stored.

         batch_size : int, optional
             The batch size for saving samples to disk. If 0, no batching is used. If positive, samples are saved to disk in batches of the specified size.
@@ -233,7 +236,8 @@ class Sampler(ABC):

             # Store samples
             self._acc.append(acc)
-            self._samples.append(self.current_point)
+            if (Nt > 0) and ((idx + 1) % Nt == 0):
+                self._samples.append(self.current_point)

             # display acc rate at progress bar
             pbar.set_postfix_str(f"acc rate: {np.mean(self._acc[-1-idx:]):.2%}")
@@ -248,7 +252,7 @@ class Sampler(ABC):
         return self


-    def warmup(self, Nb, tune_freq=0.1) -> 'Sampler':
+    def warmup(self, Nb, Nt=1, tune_freq=0.1) -> 'Sampler':
         """ Warmup the sampler by drawing Nb samples.

         Parameters
@@ -256,6 +260,9 @@ class Sampler(ABC):
         Nb : int
             The number of samples to draw during warmup.

+        Nt : int, optional, default=1
+            The thinning interval. If Nt >= 1, every Nt'th sample is stored. The larger Nt, the fewer samples are stored.
+
         tune_freq : float, optional
             The frequency of tuning. Tuning is performed every tune_freq*Nb samples.

@@ -278,7 +285,8 @@ class Sampler(ABC):

             # Store samples
             self._acc.append(acc)
-            self._samples.append(self.current_point)
+            if (Nt > 0) and ((idx + 1) % Nt == 0):
+                self._samples.append(self.current_point)

             # display acc rate at progress bar
             pbar.set_postfix_str(f"acc rate: {np.mean(self._acc[-1-idx:]):.2%}")
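Both sample and warmup now accept a thinning interval Nt: a draw is stored only when (idx + 1) % Nt == 0, so roughly Ns/Nt samples are kept while the chain still advances Ns steps. A short sketch with the experimental MH sampler follows; the Gaussian target is an assumption made for illustration.

    import numpy as np
    import cuqi

    # Illustrative target; any cuqi.experimental.mcmc sampler shares this interface.
    target = cuqi.distribution.Gaussian(np.zeros(2), 1.0)
    sampler = cuqi.experimental.mcmc.MH(target)

    sampler.warmup(200, Nt=2)     # keep every 2nd warmup draw
    sampler.sample(1000, Nt=10)   # keep every 10th draw, about 100 stored samples
    samples = sampler.get_samples()
    print(samples.samples.shape)  # (dimension, number of stored draws)

With the default Nt=1 every draw is stored, so existing scripts keep their previous behavior.
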
cuqipy-1.3.0.post0.dev395.dist-info/METADATA → cuqipy-1.4.0.post0.dev13.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: CUQIpy
-Version: 1.3.0.post0.dev395
+Version: 1.4.0.post0.dev13
 Summary: Computational Uncertainty Quantification for Inverse problems in Python
 Maintainer-email: "Nicolai A. B. Riis" <nabr@dtu.dk>, "Jakob S. Jørgensen" <jakj@dtu.dk>, "Amal M. Alghamdi" <amaal@dtu.dk>, Chao Zhang <chaz@dtu.dk>
 License: Apache License
cuqipy-1.3.0.post0.dev395.dist-info/RECORD → cuqipy-1.4.0.post0.dev13.dist-info/RECORD
@@ -1,6 +1,6 @@
 cuqi/__init__.py,sha256=LsGilhl-hBLEn6Glt8S_l0OJzAA1sKit_rui8h-D-p0,488
 cuqi/_messages.py,sha256=fzEBrZT2kbmfecBBPm7spVu7yHdxGARQB4QzXhJbCJ0,415
-cuqi/_version.py,sha256=0Bl2L2hPa3CoCMdXl3Uvvm9V61FnZBKRh8f9MyEAouw,510
+cuqi/_version.py,sha256=n3wBTk9HDuwBoLqZlr2Jn26qcapqa2Tm0KOGV3YF5fY,509
 cuqi/config.py,sha256=wcYvz19wkeKW2EKCGIKJiTpWt5kdaxyt4imyRkvtTRA,526
 cuqi/diagnostics.py,sha256=5OrbJeqpynqRXOe5MtOKKhe7EAVdOEpHIqHnlMW9G_c,3029
 cuqi/array/__init__.py,sha256=-EeiaiWGNsE3twRS4dD814BIlfxEsNkTCZUc5gjOXb0,30
@@ -13,7 +13,7 @@ cuqi/data/cat.npz,sha256=9H9iJqkvlCCVZZ2IWMfwwfVHbShpQTkZo_WGr7rrp3k,406164
 cuqi/data/cookie.png,sha256=mr6wUeoIUc5VC2qYj8vafOmTbcRwz0fHz4IIPK9_PnE,984680
 cuqi/data/satellite.mat,sha256=a0Nz_Ak-Y0m360dH74pa_rpk-MhaQ91ftGTKhQX7I8g,16373
 cuqi/density/__init__.py,sha256=0zfVcPgqdqiPkss5n_WP_PUt-G3ovHXjokhqEKIlLwA,48
-cuqi/density/_density.py,sha256=BG7gtP0cbFYLVgjYQGkNAhM95PR5ocBVLKRlOVX2PyM,7253
+cuqi/density/_density.py,sha256=Pfcq8b9MuTAuXxVwORRyNru_KIAFN1yHp2Y1yNwdyrg,7467
 cuqi/distribution/__init__.py,sha256=f-HM-SUrvPO66_FAJ6k4TffBq4H94OusRMDOJgcJU2w,779
 cuqi/distribution/_beta.py,sha256=QlibnuHNcvWjl-du5aRc9QuzS3n4PsyD_8Nc47w-E0Q,2903
 cuqi/distribution/_cauchy.py,sha256=Qwi21WkwUBnBkLbhR-yCGO0tQ_U_3mmvR0pDMPPPB5c,3296
@@ -24,7 +24,7 @@ cuqi/distribution/_gamma.py,sha256=VcvBJS51N-MxuX42r9L2j2QYRlzhdgAtQ6Wa5IFO_YE,3
 cuqi/distribution/_gaussian.py,sha256=3L1L_3W6i6YuPQ8vnFmju5QsvkLlg4VsgCnj11lYBUE,32977
 cuqi/distribution/_gmrf.py,sha256=OwId8qQWEtmC2fxVhL4iBHZnc8ZCrZzfV6yGXDE3k30,9522
 cuqi/distribution/_inverse_gamma.py,sha256=oPJuiYp3O1m547pmmIz9OWesky9YpwLTHT7-9MmcYss,3159
-cuqi/distribution/_joint_distribution.py,sha256=gBWDb9Aj27m74mSsm9Jj_0mSu0pcEk9Cwdxrzybiwx8,16710
+cuqi/distribution/_joint_distribution.py,sha256=ALOnQsIrzE8Rx_FYOs4f276u4QZQeN_e0CLC7CJpb-E,20396
 cuqi/distribution/_laplace.py,sha256=5exLvlzJm2AgfvZ3KUSkjfwlGwwbsktBxP8z0iLMik8,1401
 cuqi/distribution/_lmrf.py,sha256=rdGoQ-fPe1oW6Z29P-l3woq0NX3_RxUQ2rzm1VzemNM,3290
 cuqi/distribution/_lognormal.py,sha256=8_hOFQ3iu88ujX8vxmfVEZ0fdmlhTY98PlG5PasPjEg,2612
@@ -54,7 +54,7 @@ cuqi/experimental/mcmc/_laplace_approximation.py,sha256=I5ZLtU0lA34YflRbqxKi5UgJ
 cuqi/experimental/mcmc/_mh.py,sha256=MXo0ahXP4KGFkaY4HtvcBE-TMQzsMlTmLKzSvpz7drU,2941
 cuqi/experimental/mcmc/_pcn.py,sha256=wqJBZLuRFSwxihaI53tumAg6AWVuceLMOmXssTetd1A,3374
 cuqi/experimental/mcmc/_rto.py,sha256=O_bBeQbaYy5im5LKAhwin3uRCJpyFPcKVDH8GxriXEY,17196
-cuqi/experimental/mcmc/_sampler.py,sha256=lClOyxTnHpjohb7hQcO9SSYMvOGxZMXWK_SrEsTTsvw,20570
+cuqi/experimental/mcmc/_sampler.py,sha256=7_a9i6A7AX3NNz7qK1jTsEYt6bFCUR5WK464KfH_Kvc,21034
 cuqi/geometry/__init__.py,sha256=Tz1WGzZBY-QGH3c0GiyKm9XHN8MGGcnU6TUHLZkzB3o,842
 cuqi/geometry/_geometry.py,sha256=W-oQTZPelVS7fN9qZj6bNBuh-yY0eqOHJ39UwB-WmQY,47562
 cuqi/implicitprior/__init__.py,sha256=6Fl4Lmld8ikg9sW9tReKRGTCJC6_WCTExHaYuIv34nM,323
@@ -93,8 +93,8 @@ cuqi/testproblem/_testproblem.py,sha256=EJWG_zXUtmo6GlHBZFqHlRpDC_48tE0XZEu0_C66
 cuqi/utilities/__init__.py,sha256=d5QXRzmI6EchS9T4b7eTezSisPWuWklO8ey4YBx9kI0,569
 cuqi/utilities/_get_python_variable_name.py,sha256=wxpCaj9f3ZtBNqlGmmuGiITgBaTsY-r94lUIlK6UAU4,2043
 cuqi/utilities/_utilities.py,sha256=R7BdNysrE36a4D729DvfrTisWY4paP5nfqdkQxSX3Mg,18431
-cuqipy-1.3.0.post0.dev395.dist-info/licenses/LICENSE,sha256=kJWRPrtRoQoZGXyyvu50Uc91X6_0XRaVfT0YZssicys,10799
-cuqipy-1.3.0.post0.dev395.dist-info/METADATA,sha256=grtqGn8cVeCBwMxwOyJ1OjN6I1tXqZfvUie8KZU9y_Q,18624
-cuqipy-1.3.0.post0.dev395.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-cuqipy-1.3.0.post0.dev395.dist-info/top_level.txt,sha256=AgmgMc6TKfPPqbjV0kvAoCBN334i_Lwwojc7HE3ZwD0,5
-cuqipy-1.3.0.post0.dev395.dist-info/RECORD,,
+cuqipy-1.4.0.post0.dev13.dist-info/licenses/LICENSE,sha256=kJWRPrtRoQoZGXyyvu50Uc91X6_0XRaVfT0YZssicys,10799
+cuqipy-1.4.0.post0.dev13.dist-info/METADATA,sha256=ViQ-17Ja0AYhdceHup8JwJPCXesTy5af9iCE7gjJqcU,18623
+cuqipy-1.4.0.post0.dev13.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cuqipy-1.4.0.post0.dev13.dist-info/top_level.txt,sha256=AgmgMc6TKfPPqbjV0kvAoCBN334i_Lwwojc7HE3ZwD0,5
+cuqipy-1.4.0.post0.dev13.dist-info/RECORD,,