CUQIpy 1.0.0.post0.dev229__py3-none-any.whl → 1.0.0.post0.dev305__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: CUQIpy
- Version: 1.0.0.post0.dev229
+ Version: 1.0.0.post0.dev305
  Summary: Computational Uncertainty Quantification for Inverse problems in Python
  Maintainer-email: "Nicolai A. B. Riis" <nabr@dtu.dk>, "Jakob S. Jørgensen" <jakj@dtu.dk>, "Amal M. Alghamdi" <amaal@dtu.dk>, Chao Zhang <chaz@dtu.dk>
  License: Apache License
@@ -1,6 +1,6 @@
  cuqi/__init__.py,sha256=LsGilhl-hBLEn6Glt8S_l0OJzAA1sKit_rui8h-D-p0,488
  cuqi/_messages.py,sha256=fzEBrZT2kbmfecBBPm7spVu7yHdxGARQB4QzXhJbCJ0,415
- cuqi/_version.py,sha256=qSCcU546LTLb2CnsgdAFPFHEfSeVUxNMie_SdR9XFeY,510
+ cuqi/_version.py,sha256=Lro7539e-VFksYWNf7xFPv5bxAlSPs5DaTne7ms-G4g,510
  cuqi/config.py,sha256=wcYvz19wkeKW2EKCGIKJiTpWt5kdaxyt4imyRkvtTRA,526
  cuqi/diagnostics.py,sha256=5OrbJeqpynqRXOe5MtOKKhe7EAVdOEpHIqHnlMW9G_c,3029
  cuqi/array/__init__.py,sha256=-EeiaiWGNsE3twRS4dD814BIlfxEsNkTCZUc5gjOXb0,30
@@ -33,14 +33,14 @@ cuqi/distribution/_posterior.py,sha256=zAfL0GECxekZ2lBt1W6_LN0U_xskMwK4VNce5xAF7
  cuqi/distribution/_uniform.py,sha256=7xJmCZH_LPhuGkwEDGh-_CTtzcWKrXMOxtTJUFb7Ydo,1607
  cuqi/experimental/__init__.py,sha256=vhZvyMX6rl8Y0haqCzGLPz6PSUKyu75XMQbeDHqTTrw,83
  cuqi/experimental/mcmc/__init__.py,sha256=UqoyPWNQt4ZGIgc9Buhl5gf3toAxLjXLyQ7DieDQlRw,384
- cuqi/experimental/mcmc/_cwmh.py,sha256=yRlTk5a1QYfH3JyCecfOOTeDf-4-tmJ3Tl2Bc3pyp1Y,7336
- cuqi/experimental/mcmc/_hmc.py,sha256=qqAyoAajLE_JenYMgAbD3tknuEf75AJu-ufF69GKGk4,19384
- cuqi/experimental/mcmc/_langevin_algorithm.py,sha256=MX48u3GYgCckB6Q5h5kXr_qdIaLQH2toOG5u29OY7gk,8245
- cuqi/experimental/mcmc/_laplace_approximation.py,sha256=7reeOnDY77WnOwqYls5WStftHgylwCNVodudRroApF0,5812
- cuqi/experimental/mcmc/_mh.py,sha256=aIV1Ntq0EAq3QJ1_X-DbP7eDAL-d_Or7d3RUO-R48I4,3090
- cuqi/experimental/mcmc/_pcn.py,sha256=m7pR266uUJQociOe_CpUUlKHkfm8g--JfRWaQA2IKis,4364
- cuqi/experimental/mcmc/_rto.py,sha256=jSPznr34XPfWM6LysWIiN4hE-vtyti3cHyvzy9ruykg,11349
- cuqi/experimental/mcmc/_sampler.py,sha256=_5Uo2F-Mta46w3lo7WBVNwvTLYhES_BzMTJrKxA00c8,14861
+ cuqi/experimental/mcmc/_cwmh.py,sha256=dbvmy6Fyr_xszbO3YmYcRsDaRRtQfPimLo6rbp0II6M,6898
+ cuqi/experimental/mcmc/_hmc.py,sha256=0sZMHtnNFGGtQdzpx-cgqA0xyfvGy7r4K62RH3AQNa4,19285
+ cuqi/experimental/mcmc/_langevin_algorithm.py,sha256=n6WRQooKuUDjmqF-CtpcSNFDvaHCgLKhWxX-hi7h_ZA,8224
+ cuqi/experimental/mcmc/_laplace_approximation.py,sha256=xrlQbRaoJ8Rx1iWvSqrxX40OsirmsRuEO3R4ewuZdGM,5702
+ cuqi/experimental/mcmc/_mh.py,sha256=DUoZJ1cFnHkupmTV6-GIbINLXhG4stps44RVlkuGQ8s,2625
+ cuqi/experimental/mcmc/_pcn.py,sha256=T4T32mfoii3k6Jfz0qxPQbwdh6wdVOxttiEP7NWaZzg,3386
+ cuqi/experimental/mcmc/_rto.py,sha256=49RopzzbfRp6c_7WTFgF7e602hb4YHVuls9YWRfCmGk,10081
+ cuqi/experimental/mcmc/_sampler.py,sha256=g9c1ds5PoXr4LDYF3h_1wFI1M6m2nYp1poVu0RHrwh8,19720
  cuqi/geometry/__init__.py,sha256=Tz1WGzZBY-QGH3c0GiyKm9XHN8MGGcnU6TUHLZkzB3o,842
  cuqi/geometry/_geometry.py,sha256=WYFC-4_VBTW73b2ldsnfGYKvdSiCE8plr89xTSmkadg,46804
  cuqi/implicitprior/__init__.py,sha256=ZRZ9fgxgEl5n0A9F7WCl1_jid-GUiC8ZLkyTmGQmFlY,100
@@ -77,8 +77,8 @@ cuqi/testproblem/_testproblem.py,sha256=x769LwwRdJdzIiZkcQUGb_5-vynNTNALXWKato7s
  cuqi/utilities/__init__.py,sha256=T4tLsC215MknBCsw_C0Qeeg_ox26aDUrCA5hbWvNQkU,387
  cuqi/utilities/_get_python_variable_name.py,sha256=QwlBVj2koJRA8s8pWd554p7-ElcI7HUwY32HknaR92E,1827
  cuqi/utilities/_utilities.py,sha256=MWAqV6L5btMpWwlUzrZYuV2VeSpfTbOaLRMRkuw2WIA,8509
- CUQIpy-1.0.0.post0.dev229.dist-info/LICENSE,sha256=kJWRPrtRoQoZGXyyvu50Uc91X6_0XRaVfT0YZssicys,10799
- CUQIpy-1.0.0.post0.dev229.dist-info/METADATA,sha256=-LQQOopcYpRJTp_ZhyH2H97JxI2jLfgsIS1AzYs1FNU,18393
- CUQIpy-1.0.0.post0.dev229.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- CUQIpy-1.0.0.post0.dev229.dist-info/top_level.txt,sha256=AgmgMc6TKfPPqbjV0kvAoCBN334i_Lwwojc7HE3ZwD0,5
- CUQIpy-1.0.0.post0.dev229.dist-info/RECORD,,
+ CUQIpy-1.0.0.post0.dev305.dist-info/LICENSE,sha256=kJWRPrtRoQoZGXyyvu50Uc91X6_0XRaVfT0YZssicys,10799
+ CUQIpy-1.0.0.post0.dev305.dist-info/METADATA,sha256=dqQXHzSfLCCBGnno4jP-AkVQDWa0u_RJR-QGFUXZKk8,18393
+ CUQIpy-1.0.0.post0.dev305.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ CUQIpy-1.0.0.post0.dev305.dist-info/top_level.txt,sha256=AgmgMc6TKfPPqbjV0kvAoCBN334i_Lwwojc7HE3ZwD0,5
+ CUQIpy-1.0.0.post0.dev305.dist-info/RECORD,,
cuqi/_version.py CHANGED
@@ -8,11 +8,11 @@ import json

  version_json = '''
  {
- "date": "2024-05-21T12:48:50+0200",
+ "date": "2024-05-27T07:34:21+0200",
  "dirty": false,
  "error": null,
- "full-revisionid": "eb9519734f2558f66772895e41f2cd0c3cd61767",
- "version": "1.0.0.post0.dev229"
+ "full-revisionid": "1274ccfff1cb9663b547dd2aeb9c86c72dccb2ae",
+ "version": "1.0.0.post0.dev305"
  }
  ''' # END VERSION_JSON

cuqi/experimental/mcmc/_cwmh.py CHANGED
@@ -67,16 +67,21 @@ class CWMHNew(ProposalBasedSamplerNew):
  samples = sampler.sample(2000).get_samples()

  """
- def __init__(self, target: cuqi.density.Density, proposal=None, scale=1,
+
+ _STATE_KEYS = ProposalBasedSamplerNew._STATE_KEYS.union(['_scale_temp'])
+
+ def __init__(self, target:cuqi.density.Density=None, proposal=None, scale=1,
  initial_point=None, **kwargs):
  super().__init__(target, proposal=proposal, scale=scale,
  initial_point=initial_point, **kwargs)
+
+ def _initialize(self):
+ if isinstance(self.scale, Number):
+ self.scale = np.ones(self.dim)*self.scale
+ self._acc = [np.ones((self.dim))] # Overwrite acc from ProposalBasedSamplerNew with list of arrays

- # set initial scale
- self.scale = scale
-
- # set initial acceptance rate
- self._acc = [np.ones((self.dim))]
+ # Handling of temporary scale parameter due to possible bug in old CWMH
+ self._scale_temp = self.scale.copy()

  @property
  def scale(self):
@@ -86,53 +91,35 @@ class CWMHNew(ProposalBasedSamplerNew):
  @scale.setter
  def scale(self, value):
  """ Set the scale parameter. """
- if isinstance(value, Number):
- self._scale = np.ones(self.dim)*value
- elif isinstance(value, np.ndarray):
- self._scale = value
- self._scale_temp = self._scale.copy()
+ if self._is_initialized and isinstance(value, Number):
+ value = np.ones(self.dim)*value
+ self._scale = value

  def validate_target(self):
  if not isinstance(self.target, cuqi.density.Density):
  raise ValueError(
  "Target should be an instance of "+\
  f"{cuqi.density.Density.__class__.__name__}")
-
- @ProposalBasedSamplerNew.proposal.setter
- # TODO. Check if we can refactor this.
- # We can work with a validate_proposal method instead?
- def proposal(self, value):
- fail_msg = "Proposal should be either None, "+\
- f"{cuqi.distribution.Distribution.__class__.__name__} "+\
- "conditioned only on 'location' and 'scale', lambda function, "+\
- f"or {cuqi.distribution.Normal.__class__.__name__} conditioned "+\
- "only on 'mean' and 'std'"
-
- if value is None:
+
+ def validate_proposal(self):
+ if not isinstance(self.proposal, cuqi.distribution.Distribution):
+ raise ValueError("Proposal must be a cuqi.distribution.Distribution object")
+ if not self.proposal.is_symmetric:
+ raise ValueError("Proposal must be symmetric")
+
+ @property
+ def proposal(self):
+ if self._proposal is None:
  self._proposal = cuqi.distribution.Normal(
  mean=lambda location: location,
  std=lambda scale: scale,
  geometry=self.dim,
  )
-
- elif isinstance(value, cuqi.distribution.Distribution) and sorted(
- value.get_conditioning_variables()
- ) == ["location", "scale"]:
- self._proposal = value
-
- elif isinstance(value, cuqi.distribution.Normal) and sorted(
- value.get_conditioning_variables()
- ) == ["mean", "std"]:
- self._proposal = value(
- mean=lambda location: location, std=lambda scale: scale
- )
-
- elif not isinstance(value, cuqi.distribution.Distribution) and callable(
- value):
- self._proposal = value
-
- else:
- raise ValueError(fail_msg)
+ return self._proposal
+
+ @proposal.setter
+ def proposal(self, value):
+ self._proposal = value

  def step(self):
  # Initialize x_t which is used to store the current CWMH sample
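Note on the CWMH changes above: expanding a scalar scale into a per-component array now happens in _initialize, once the target (and hence self.dim) is available, rather than in __init__. A minimal usage sketch under that assumption; the Gaussian target and the CWMHNew export from cuqi.experimental.mcmc are illustrative stand-ins rather than confirmed package details:

    import numpy as np
    import cuqi

    # Hypothetical stand-in target; any cuqi.density.Density passes validate_target
    target = cuqi.distribution.Gaussian(np.zeros(5), 1)

    sampler = cuqi.experimental.mcmc.CWMHNew(target, scale=0.5)  # scalar scale is stored as-is for now
    samples = sampler.sample(2000).get_samples()                 # scale expands to an array of length dim at initialization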
cuqi/experimental/mcmc/_hmc.py CHANGED
@@ -95,7 +95,7 @@ class NUTSNew(SamplerNew):
  'epsilon_list',
  'epsilon_bar_list'})

- def __init__(self, target, initial_point=None, max_depth=15,
+ def __init__(self, target=None, initial_point=None, max_depth=15,
  step_size=None, opt_acc_rate=0.6, **kwargs):
  super().__init__(target, initial_point=initial_point, **kwargs)

@@ -103,9 +103,6 @@ class NUTSNew(SamplerNew):
  self.max_depth = max_depth
  self.step_size = step_size
  self.opt_acc_rate = opt_acc_rate
-
- # Set current point
- self.current_point = self.initial_point

  # Initialize epsilon and epsilon_bar
  # epsilon is the step size used in the current iteration
@@ -115,12 +112,37 @@ class NUTSNew(SamplerNew):
  self._epsilon_bar = None
  self._H_bar = None

+ # Extra parameters for tuning
+ self._n_alpha = None
+ self._alpha = None
+
+
+ def _initialize(self):
+
  # Arrays to store acceptance rate
- self._acc = [None]
+ self._acc = [None] # Overwrites acc from SamplerNew. TODO. Check if this is necessary
+
+ self._alpha = 0 # check if meaningful value
+ self._n_alpha = 0 # check if meaningful value
+
+ self.current_target_logd, self.current_target_grad = self._nuts_target(self.current_point)
+
+ # parameters dual averaging
+ if self.step_size is None:
+ self._epsilon = self._FindGoodEpsilon()
+ else:
+ self._epsilon = self.step_size
+ self._epsilon_bar = "unset"
+
+ # Parameter mu, does not change during the run
+ self._mu = np.log(10*self._epsilon)
+
+ self._H_bar = 0

  # NUTS run diagnostic:
  # number of tree nodes created each NUTS iteration
  self._num_tree_node = 0
+
  # Create lists to store NUTS run diagnostics
  self._create_run_diagnostic_attributes()

@@ -176,9 +198,9 @@ class NUTSNew(SamplerNew):
  except:
  raise ValueError('Target must have logd and gradient methods.')

- def reset(self):
+ def reinitialize(self):
  # Call the parent reset method
- super().reset()
+ super().reinitialize()
  # Reset NUTS run diagnostic attributes
  self._reset_run_diagnostic_attributes()

@@ -275,10 +297,6 @@ class NUTSNew(SamplerNew):
  np.exp(eta*np.log(self._epsilon) +(1-eta)*np.log(self._epsilon_bar))

  def _pre_warmup(self):
- super()._pre_warmup()
-
- self.current_target_logd, self.current_target_grad =\
- self._nuts_target(self.current_point)

  # Set up tuning parameters (only first time tuning is called)
  # Note:
@@ -289,32 +307,14 @@ class NUTSNew(SamplerNew):
  # Parameters that does not change during the run
  # self._mu

- if self._epsilon is None:
- # parameters dual averaging
- self._epsilon = self._FindGoodEpsilon()
- # Parameter mu, does not change during the run
- self._mu = np.log(10*self._epsilon)
-
- if self._epsilon_bar is None: # Initial value of epsilon_bar
+ if self._epsilon_bar == "unset": # Initial value of epsilon_bar for tuning
  self._epsilon_bar = 1

- if self._H_bar is None: # Initial value of H_bar
- self._H_bar = 0
-
  def _pre_sample(self):
- super()._pre_sample()

- self.current_target_logd, self.current_target_grad =\
- self._nuts_target(self.current_point)
-
- # Set up epsilon and epsilon_bar if not set
- if self._epsilon is None:
- if self.step_size is None:
- step_size = self._FindGoodEpsilon()
- else:
- step_size = self.step_size
- self._epsilon = step_size
- self._epsilon_bar = step_size
+ if self._epsilon_bar == "unset": # Initial value of epsilon_bar for sampling
+ self._epsilon_bar = self._epsilon
+

  #=========================================================================
  def _nuts_target(self, x): # returns logposterior tuple evaluation-gradient
@@ -467,4 +467,4 @@ class NUTSNew(SamplerNew):
  # Store the step size used in iteration k
  self.epsilon_list.append(eps)
  # Store the step size suggestion during adaptation in iteration k
- self.epsilon_bar_list.append(eps_bar)
+ self.epsilon_bar_list.append(eps_bar)
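The NUTS hunks above consolidate the dual-averaging setup into _initialize: when step_size is None a starting step size is found with _FindGoodEpsilon, otherwise the user-supplied value is used, and _epsilon_bar stays "unset" until warmup or sampling fixes it. A hedged sketch of the two modes, with the target as a hypothetical stand-in (validate_target only requires it to expose logd and gradient):

    import numpy as np
    import cuqi

    target = cuqi.distribution.Gaussian(np.zeros(5), 1)  # hypothetical stand-in with logd and gradient

    nuts_auto = cuqi.experimental.mcmc.NUTSNew(target)                  # step_size=None: _FindGoodEpsilon runs at initialization
    nuts_fixed = cuqi.experimental.mcmc.NUTSNew(target, step_size=0.1)  # fixed step size; epsilon_bar is set on first warmup/sample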
cuqi/experimental/mcmc/_langevin_algorithm.py CHANGED
@@ -63,15 +63,16 @@ class ULANew(SamplerNew): # Refactor to Proposal-based sampler?

  _STATE_KEYS = SamplerNew._STATE_KEYS.union({'current_target_logd', 'scale', 'current_target_grad'})

- def __init__(self, target, scale=1.0, **kwargs):
+ def __init__(self, target=None, scale=1.0, **kwargs):

  super().__init__(target, **kwargs)

- self.scale = scale
- self.current_point = self.initial_point
+ self.initial_scale = scale
+
+ def _initialize(self):
+ self.scale = self.initial_scale
  self.current_target_logd = self.target.logd(self.current_point)
  self.current_target_grad = self.target.gradient(self.current_point)
- self._acc = [1] # TODO. Check if we need this

  def validate_target(self):
  try:
cuqi/experimental/mcmc/_laplace_approximation.py CHANGED
@@ -53,17 +53,13 @@ class UGLANew(SamplerNew):

  super().__init__(target=target, initial_point=initial_point, **kwargs)

- if initial_point is None: #TODO: Replace later with a getter
- self.initial_point = np.zeros(self.dim)
- self._samples = [self.initial_point]
-
- self.current_point = self.initial_point
- self._acc = [1] # TODO. Check if we need this
-
  # Parameters
  self.maxit = maxit
  self.tol = tol
  self.beta = beta
+
+ def _initialize(self):
+ self._precompute()

  @property
  def prior(self):
@@ -81,8 +77,8 @@ class UGLANew(SamplerNew):
  def data(self):
  return self.target.data

- def _pre_warmup(self):
- super()._pre_warmup()
+ def _precompute(self):
+
  D = self.prior._diff_op
  n = D.shape[0]

@@ -121,9 +117,6 @@ class UGLANew(SamplerNew):
  return out
  self.M = M

- def _pre_sample(self):
- self._pre_warmup()
-
  def step(self):
  # Update Laplace approximation
  self._L2 = self.Lk_fun(self.current_point)
@@ -157,3 +150,8 @@ class UGLANew(SamplerNew):
  # Check that prior is LMRF
  if not isinstance(self.prior, cuqi.distribution.LMRF):
  raise ValueError('Unadjusted Gaussian Laplace approximation (UGLA) requires LMRF prior')
+
+ @property
+ def _default_initial_point(self):
+ """ Get the default initial point for the sampler. Defaults to an array of zeros. """
+ return np.zeros(self.dim)
cuqi/experimental/mcmc/_mh.py CHANGED
@@ -1,7 +1,6 @@
  import numpy as np
  import cuqi
  from cuqi.experimental.mcmc import ProposalBasedSamplerNew
- from cuqi.array import CUQIarray


  class MHNew(ProposalBasedSamplerNew):
@@ -27,25 +26,19 @@ class MHNew(ProposalBasedSamplerNew):

  def __init__(self, target, proposal=None, scale=1, **kwargs):
  super().__init__(target, proposal=proposal, scale=scale, **kwargs)
+
+ def _initialize(self):
  # Due to a bug? in old MH, we must keep track of this extra variable to match behavior.
- self._scale_temp = self.scale
+ self._scale_temp = self.scale

  def validate_target(self):
  pass # All targets are valid

- @ProposalBasedSamplerNew.proposal.setter # TODO. Check if we can refactor this. We can work with a validate_proposal method instead?
- def proposal(self, value):
- fail_msg = "Proposal should be either None, symmetric cuqi.distribution.Distribution or a lambda function."
-
- if value is None:
- self._proposal = cuqi.distribution.Gaussian(np.zeros(self.dim), 1)
- elif not isinstance(value, cuqi.distribution.Distribution) and callable(value):
- raise NotImplementedError(fail_msg)
- elif isinstance(value, cuqi.distribution.Distribution) and value.is_symmetric:
- self._proposal = value
- else:
- raise ValueError(fail_msg)
- self._proposal.geometry = self.target.geometry
+ def validate_proposal(self):
+ if not isinstance(self.proposal, cuqi.distribution.Distribution):
+ raise ValueError("Proposal must be a cuqi.distribution.Distribution object")
+ if not self.proposal.is_symmetric:
+ raise ValueError("Proposal must be symmetric")

  def step(self):
  # propose state
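In the MH hunk above, the custom proposal setter is replaced by validate_proposal, which the base class calls whenever a non-None proposal is assigned; lambda-function proposals are no longer handled here, and a zero-mean unit-variance Gaussian is used when no proposal is given. A minimal sketch under those assumptions (the target and proposal values are illustrative, not confirmed package defaults):

    import numpy as np
    import cuqi

    target = cuqi.distribution.Gaussian(np.zeros(5), 1)        # hypothetical stand-in target; MH accepts any target

    mh_default = cuqi.experimental.mcmc.MHNew(target)          # falls back to the default Gaussian(0, 1) proposal
    proposal = cuqi.distribution.Gaussian(np.zeros(5), 0.1)    # symmetric Distribution, so validate_proposal accepts it
    mh_custom = cuqi.experimental.mcmc.MHNew(target, proposal=proposal, scale=0.5)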
cuqi/experimental/mcmc/_pcn.py CHANGED
@@ -7,29 +7,25 @@ class PCNNew(SamplerNew): # Refactor to Proposal-based sampler?

  _STATE_KEYS = SamplerNew._STATE_KEYS.union({'scale', 'current_likelihood_logd', 'lambd'})

- def __init__(self, target, scale=1.0, **kwargs):
+ def __init__(self, target=None, scale=1.0, **kwargs):

  super().__init__(target, **kwargs)
+ self.initial_scale = scale

- self.scale = scale
- self.current_point = self.initial_point
+ def _initialize(self):
+ self.scale = self.initial_scale
  self.current_likelihood_logd = self._loglikelihood(self.current_point)

- self._acc = [1] # TODO. Check if we need this
-
  # parameters used in the Robbins-Monro recursion for tuning the scale parameter
  # see details and reference in the tune method
  self.lambd = self.scale
  self.star_acc = 0.44 #TODO: 0.234 # target acceptance rate

  def validate_target(self):
- try:
- if isinstance(self.prior, (cuqi.distribution.Gaussian, cuqi.distribution.Normal)):
- pass
- else:
- raise ValueError("The prior distribution of the target need to be Gaussian")
- except AttributeError:
- raise ValueError("The target need to have a prior distribution")
+ if not isinstance(self.target, cuqi.distribution.Posterior):
+ raise ValueError(f"To initialize an object of type {self.__class__}, 'target' need to be of type 'cuqi.distribution.Posterior'.")
+ if not isinstance(self.prior, (cuqi.distribution.Gaussian, cuqi.distribution.Normal)):
+ raise ValueError("The prior distribution of the target need to be Gaussian")

  def step(self):
  # propose state
@@ -55,30 +51,14 @@ class PCNNew(SamplerNew): # Refactor to Proposal-based sampler?

  @property
  def prior(self):
- if isinstance(self.target, cuqi.distribution.Posterior):
- return self.target.prior
- elif isinstance(self.target,tuple) and len(self.target)==2:
- return self.target[1]
+ return self.target.prior

  @property
  def likelihood(self):
- if isinstance(self.target, cuqi.distribution.Posterior):
- return self.target.likelihood
- elif isinstance(self.target,tuple) and len(self.target)==2:
- return self.target[0]
-
- @SamplerNew.target.setter
- def target(self, value):
- if isinstance(value, cuqi.distribution.Posterior):
- self._target = value
- self._loglikelihood = lambda x : self.likelihood.logd(x)
- elif isinstance(value,tuple) and len(value)==2 and \
- (isinstance(value[0], cuqi.likelihood.Likelihood) or isinstance(value[0], cuqi.likelihood.UserDefinedLikelihood)) and \
- isinstance(value[1], cuqi.distribution.Distribution):
- self._target = value
- self._loglikelihood = lambda x : self.likelihood.logd(x)
- else:
- raise ValueError(f"To initialize an object of type {self.__class__}, 'target' need to be of type 'cuqi.distribution.Posterior'.")
+ return self.target.likelihood
+
+ def _loglikelihood(self, x):
+ return self.likelihood.logd(x)

  @property
  def dim(self): # TODO. Check if we need this. Implemented in base class
@@ -106,4 +86,4 @@ class PCNNew(SamplerNew): # Refactor to Proposal-based sampler?
  self.lambd = np.exp(np.log(self.lambd) + zeta*(hat_acc-self.star_acc))

  # update scale parameter
- self.scale = min(self.lambd, 1)
+ self.scale = min(self.lambd, 1)
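The PCN hunks above drop the tuple-based (likelihood, prior) targets: validate_target now requires a cuqi.distribution.Posterior whose prior is Gaussian or Normal, and prior/likelihood are read directly from that posterior. A hedged sketch of constructing such a target; the model, data and noise level are hypothetical, while the to_likelihood and Posterior calls mirror the ones visible in the removed LinearRTO code further down:

    import numpy as np
    import cuqi

    A = np.eye(5)                                            # hypothetical forward operator
    model = cuqi.model.LinearModel(A)
    y_obs = np.zeros(5)                                      # hypothetical data

    likelihood = cuqi.distribution.Gaussian(model, 0.01).to_likelihood(y_obs)
    prior = cuqi.distribution.Gaussian(np.zeros(5), 1)
    posterior = cuqi.distribution.Posterior(likelihood, prior)

    pcn = cuqi.experimental.mcmc.PCNNew(posterior, scale=0.2)  # passes validate_target: Posterior with Gaussian prior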
cuqi/experimental/mcmc/_rto.py CHANGED
@@ -5,7 +5,6 @@ import numpy as np
  import cuqi
  from cuqi.solver import CGLS, FISTA
  from cuqi.experimental.mcmc import SamplerNew
- from cuqi.array import CUQIarray


  class LinearRTONew(SamplerNew):
@@ -44,21 +43,17 @@ class LinearRTONew(SamplerNew):
  An example is shown in demos/demo31_callback.py.

  """
- def __init__(self, target, initial_point=None, maxit=10, tol=1e-6, **kwargs):
+ def __init__(self, target=None, initial_point=None, maxit=10, tol=1e-6, **kwargs):

  super().__init__(target=target, initial_point=initial_point, **kwargs)

- if initial_point is None: #TODO: Replace later with a getter
- self.initial_point = np.zeros(self.dim)
- self._samples = [self.initial_point]
-
- self.current_point = self.initial_point
- self._acc = [1] # TODO. Check if we need this
-
  # Other parameters
  self.maxit = maxit
  self.tol = tol

+ def _initialize(self):
+ self._precompute()
+
  @property
  def prior(self):
  return self.target.prior
@@ -81,41 +76,7 @@ class LinearRTONew(SamplerNew):
  @property
  def data(self):
  return self.target.data
-
- @SamplerNew.target.setter
- def target(self, value):
- """ Set the target density. Runs validation of the target. """
- # Accept tuple of inputs and construct posterior
- if isinstance(value, tuple) and len(value) == 5:
- # Structure (data, model, L_sqrtprec, P_mean, P_sqrtprec)
- data = value[0]
- model = value[1]
- L_sqrtprec = value[2]
- P_mean = value[3]
- P_sqrtprec = value[4]
-
- # If numpy matrix convert to CUQI model
- if isinstance(model, np.ndarray) and len(model.shape) == 2:
- model = cuqi.model.LinearModel(model)
-
- # Check model input
- if not isinstance(model, cuqi.model.LinearModel):
- raise TypeError("Model needs to be cuqi.model.LinearModel or matrix")
-
- # Likelihood
- L = cuqi.distribution.Gaussian(model, sqrtprec=L_sqrtprec).to_likelihood(data)
-
- # Prior TODO: allow multiple priors stacked
- #if isinstance(P_mean, list) and isinstance(P_sqrtprec, list):
- # P = cuqi.distribution.JointGaussianSqrtPrec(P_mean, P_sqrtprec)
- #else:
- P = cuqi.distribution.Gaussian(P_mean, sqrtprec=P_sqrtprec)
-
- # Construct posterior
- value = cuqi.distribution.Posterior(L, P)
- super(LinearRTONew, type(self)).target.fset(self, value)
- self._precompute()
-
+
  def _precompute(self):
  L1 = [likelihood.distribution.sqrtprec for likelihood in self.likelihoods]
  L2 = self.prior.sqrtprec
@@ -188,6 +149,11 @@ class LinearRTONew(SamplerNew):

  if not hasattr(self.prior, "sqrtprecTimesMean"):
  raise TypeError("Prior must contain a sqrtprecTimesMean attribute")
+
+ @property
+ def _default_initial_point(self):
+ """ Get the default initial point for the sampler. Defaults to an array of zeros. """
+ return np.zeros(self.dim)

  class RegularizedLinearRTONew(LinearRTONew):
  """
@@ -231,15 +197,22 @@ class RegularizedLinearRTONew(LinearRTONew):
  self.stepsize = stepsize
  self.abstol = abstol
  self.adaptive = adaptive
- self.proximal = target.prior.proximal
- self._stepsize = self._choose_stepsize()
  self.maxit = maxit

- @LinearRTONew.target.setter
- def target(self, value):
- if not callable(value.prior.proximal):
- raise TypeError("Projector needs to be callable")
- return super(RegularizedLinearRTONew, type(self)).target.fset(self, value)
+ def _initialize(self):
+ super()._initialize()
+ self._stepsize = self._choose_stepsize()
+
+ @property
+ def proximal(self):
+ return self.target.prior.proximal
+
+ def validate_target(self):
+ super().validate_target()
+ if not isinstance(self.target.prior, (cuqi.implicitprior.RegularizedGaussian, cuqi.implicitprior.RegularizedGMRF)):
+ raise TypeError("Prior needs to be RegularizedGaussian or RegularizedGMRF")
+ if not callable(self.proximal):
+ raise TypeError("Proximal needs to be callable")

  def _choose_stepsize(self):
  if isinstance(self.stepsize, str):
cuqi/experimental/mcmc/_sampler.py CHANGED
@@ -20,7 +20,23 @@ class SamplerNew(ABC):

  Samples are stored in a list to allow for dynamic growth of the sample set. Returning samples is done by creating a new Samples object from the list of samples.

+ The sampler maintains sets of state and history keys, which are used for features like checkpointing and resuming sampling.
+
+ The state of the sampler represents all variables that are updated (replaced) in a Markov Monte Carlo step, e.g. the current point of the sampler.
+
+ The history of the sampler represents all variables that are updated (appended) in a Markov Monte Carlo step, e.g. the samples and acceptance rates.
+
+ Subclasses should ensure that any new variables that are updated in a Markov Monte Carlo step are added to the state or history keys.
+
+ Saving and loading checkpoints saves and loads the state of the sampler (not the history).
+
+ Batching samples via the batch_size parameter saves the sampler history to disk in batches of the specified size.
+
+ Any other attribute stored as part of the sampler (e.g. target, initial_point) is not supposed to be updated
+ during sampling and should not be part of the state or history.
+
  """
+
  _STATE_KEYS = {'current_point'}
  """ Set of keys for the state dictionary. """

@@ -29,6 +45,10 @@ class SamplerNew(ABC):

  def __init__(self, target: cuqi.density.Density, initial_point=None, callback=None):
  """ Initializer for abstract base class for all samplers.
+
+ Any subclassing samplers should simply store input parameters as part of the __init__ method.
+
+ The actual initialization of the sampler should be done in the _initialize method.

  Parameters
  ----------
@@ -45,18 +65,37 @@ class SamplerNew(ABC):
  """

  self.target = target
+ self.initial_point = initial_point
  self.callback = callback
+ self._is_initialized = False

- # Choose initial point if not given
- if initial_point is None:
- initial_point = np.ones(self.dim)
+ def initialize(self):
+ """ Initialize the sampler by setting and allocating the state and history before sampling starts. """

- self.initial_point = initial_point
+ if self._is_initialized:
+ raise ValueError("Sampler is already initialized.")

- self._samples = [initial_point] # Remove. See #324.
+ if self.target is None:
+ raise ValueError("Cannot initialize sampler without a target density.")
+
+ # Default values
+ if self.initial_point is None:
+ self.initial_point = self._default_initial_point
+
+ # State variables
+ self.current_point = self.initial_point
+
+ # History variables
+ self._samples = []
+ self._acc = [ 1 ] # TODO. Check if we need to put 1 here.
+
+ self._initialize() # Subclass specific initialization
+
+ self._validate_initialization()
+
+ self._is_initialized = True

  # ------------ Abstract methods to be implemented by subclasses ------------
-
  @abstractmethod
  def step(self):
  """ Perform one step of the sampler by transitioning the current point to a new point according to the sampler's transition kernel. """
@@ -72,23 +111,19 @@ class SamplerNew(ABC):
  """ Validate the target is compatible with the sampler. Called when the target is set. Should raise an error if the target is not compatible. """
  pass

- # -- _pre_sample and _pre_warmup methods: can be overridden by subclasses --
- def _pre_sample(self):
- """ Any code that needs to be run before sampling. """
- pass
-
- def _pre_warmup(self):
- """ Any code that needs to be run before warmup. """
+ @abstractmethod
+ def _initialize(self):
+ """ Subclass specific sampler initialization. Called during the initialization of the sampler which is done before sampling starts. """
  pass

  # ------------ Public attributes ------------
  @property
- def dim(self):
+ def dim(self) -> int:
  """ Dimension of the target density. """
  return self.target.dim

  @property
- def geometry(self):
+ def geometry(self) -> cuqi.geometry.Geometry:
  """ Geometry of the target density. """
  return self.target.geometry

@@ -101,39 +136,49 @@ class SamplerNew(ABC):
  def target(self, value):
  """ Set the target density. Runs validation of the target. """
  self._target = value
- self.validate_target()
-
- @property
- def current_point(self):
- """ The current point of the sampler. """
- return self._current_point
-
- @current_point.setter
- def current_point(self, value):
- """ Set the current point of the sampler. """
- self._current_point = value
+ if self._target is not None:
+ self.validate_target()

  # ------------ Public methods ------------
-
  def get_samples(self) -> Samples:
  """ Return the samples. The internal data-structure for the samples is a dynamic list so this creates a copy. """
  return Samples(np.array(self._samples).T, self.target.geometry)

- def reset(self): # TODO. Issue here. Current point is not reset, and initial point is lost with this reset.
- self._samples.clear()
- self._acc.clear()
+ def reinitialize(self):
+ """ Re-initialize the sampler. This clears the state and history and initializes the sampler again by setting state and history to their original values. """
+
+ # Loop over state and reset to None
+ for key in self._STATE_KEYS:
+ setattr(self, key, None)
+
+ # Loop over history and reset to None
+ for key in self._HISTORY_KEYS:
+ setattr(self, key, None)
+
+ self._is_initialized = False
+
+ self.initialize()

  def save_checkpoint(self, path):
  """ Save the state of the sampler to a file. """

+ self._ensure_initialized()
+
  state = self.get_state()

+ # Convert all CUQIarrays to numpy arrays since CUQIarrays do not get pickled correctly
+ for key, value in state['state'].items():
+ if isinstance(value, cuqi.array.CUQIarray):
+ state['state'][key] = value.to_numpy()
+
  with open(path, 'wb') as handle:
  pkl.dump(state, handle, protocol=pkl.HIGHEST_PROTOCOL)

  def load_checkpoint(self, path):
  """ Load the state of the sampler from a file. """

+ self._ensure_initialized()
+
  with open(path, 'rb') as handle:
  state = pkl.load(handle)

@@ -155,12 +200,14 @@ class SamplerNew(ABC):

  """

+ self._ensure_initialized()
+
  # Initialize batch handler
  if batch_size > 0:
  batch_handler = _BatchHandler(batch_size, sample_path)

  # Any code that needs to be run before sampling
- self._pre_sample()
+ if hasattr(self, "_pre_sample"): self._pre_sample()

  # Draw samples
  for _ in progressbar( range(Ns) ):
@@ -195,10 +242,12 @@ class SamplerNew(ABC):

  """

+ self._ensure_initialized()
+
  tune_interval = max(int(tune_freq * Nb), 1)

  # Any code that needs to be run before warmup
- self._pre_warmup()
+ if hasattr(self, "_pre_warmup"): self._pre_warmup()

  # Draw warmup samples with tuning
  for idx in progressbar(range(Nb)):
@@ -306,20 +355,60 @@ class SamplerNew(ABC):
  raise ValueError(f"Key {key} not recognized in history dictionary of sampler {self.__class__.__name__}.")

  # ------------ Private methods ------------
-
  def _call_callback(self, sample, sample_index):
  """ Calls the callback function. Assumes input is sample and sample index"""
  if self.callback is not None:
  self.callback(sample, sample_index)

+ def _validate_initialization(self):
+ """ Validate the initialization of the sampler by checking all state and history keys are set. """
+
+ for key in self._STATE_KEYS:
+ if getattr(self, key) is None:
+ raise ValueError(f"Sampler state key {key} is not set after initialization.")
+
+ for key in self._HISTORY_KEYS:
+ if getattr(self, key) is None:
+ raise ValueError(f"Sampler history key {key} is not set after initialization.")
+
+ def _ensure_initialized(self):
+ """ Ensure the sampler is initialized. If not initialize it. """
+ if not self._is_initialized:
+ self.initialize()
+
+ @property
+ def _default_initial_point(self):
+ """ Return the default initial point for the sampler. Defaults to an array of ones. """
+ return np.ones(self.dim)
+
+ def __repr__(self):
+ """ Return a string representation of the sampler. """
+ if self.target is None:
+ return f"Sampler: {self.__class__.__name__} \n Target: None"
+ state = self.get_state()
+ msg = f" Sampler: \n\t {self.__class__.__name__} \n Target: \n \t {self.target} \n Current state: \n"
+ # Sort keys alphabetically
+ keys = sorted(state['state'].keys())
+ # Put _ in the end
+ keys = [key for key in keys if key[0] != '_'] + [key for key in keys if key[0] == '_']
+ for key in keys:
+ value = state['state'][key]
+ msg += f"\t {key}: {value} \n"
+ return msg

  class ProposalBasedSamplerNew(SamplerNew, ABC):
  """ Abstract base class for samplers that use a proposal distribution. """

  _STATE_KEYS = SamplerNew._STATE_KEYS.union({'current_target_logd', 'scale'})

- def __init__(self, target, proposal=None, scale=1, **kwargs):
- """ Initializer for proposal based samplers.
+ def __init__(self, target=None, proposal=None, scale=1, **kwargs):
+ """ Initializer for abstract base class for samplers that use a proposal distribution.
+
+ Any subclassing samplers should simply store input parameters as part of the __init__ method.
+
+ Initialization of the sampler should be done in the _initialize method.
+
+ See :class:`SamplerNew` for additional details.

  Parameters
  ----------
@@ -338,35 +427,62 @@ class ProposalBasedSamplerNew(SamplerNew, ABC):
  """

  super().__init__(target, **kwargs)
+ self.proposal = proposal
+ self.initial_scale = scale
+
+ def initialize(self):
+ """ Initialize the sampler by setting and allocating the state and history before sampling starts. """
+
+ if self._is_initialized:
+ raise ValueError("Sampler is already initialized.")
+
+ if self.target is None:
+ raise ValueError("Cannot initialize sampler without a target density.")
+
+ # Default values
+ if self.initial_point is None:
+ self.initial_point = self._default_initial_point
+
+ if self.proposal is None:
+ self.proposal = self._default_proposal

+ # State variables
  self.current_point = self.initial_point
+ self.scale = self.initial_scale
+
  self.current_target_logd = self.target.logd(self.current_point)
- self.proposal = proposal
- self.scale = scale

- self._acc = [ 1 ] # TODO. Check
+ # History variables
+ self._samples = []
+ self._acc = [ 1 ] # TODO. Check if we need to put 1 here.

- @property
- def proposal(self):
- return self._proposal
+ self._initialize() # Subclass specific initialization
+
+ self._validate_initialization()
+
+ self._is_initialized = True
+
+ @abstractmethod
+ def validate_proposal(self):
+ """ Validate the proposal distribution. """
+ pass

- @proposal.setter
- def proposal(self, value):
- self._proposal = value
+ @property
+ def _default_proposal(self):
+ """ Return the default proposal distribution. Defaults to a Gaussian distribution with zero mean and unit variance. """
+ return cuqi.distribution.Gaussian(np.zeros(self.dim), 1)

  @property
- def geometry(self): # TODO. Check if we can refactor this
- geom1, geom2 = None, None
- if hasattr(self, 'proposal') and hasattr(self.proposal, 'geometry') and self.proposal.geometry.par_dim is not None:
- geom1= self.proposal.geometry
- if hasattr(self, 'target') and hasattr(self.target, 'geometry') and self.target.geometry.par_dim is not None:
- geom2 = self.target.geometry
- if not isinstance(geom1,cuqi.geometry._DefaultGeometry) and geom1 is not None:
- return geom1
- elif not isinstance(geom2,cuqi.geometry._DefaultGeometry) and geom2 is not None:
- return geom2
- else:
- return cuqi.geometry._DefaultGeometry(self.dim)
+ def proposal(self):
+ """ The proposal distribution. """
+ return self._proposal
+
+ @proposal.setter
+ def proposal(self, proposal):
+ """ Set the proposal distribution. """
+ self._proposal = proposal
+ if self._proposal is not None:
+ self.validate_proposal()


  class _BatchHandler:
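Taken together, the _sampler.py changes above establish a lazy-initialization pattern for the experimental samplers: __init__ only stores parameters (the target may even be omitted), initialize()/_initialize() allocate state and history, and _ensure_initialized() triggers that allocation from sampling, warmup and checkpointing. A hedged end-to-end sketch; the target, the file path and the warmup entry point are assumptions for illustration (the warmup method name is inferred from the tuning hunk, not shown explicitly above):

    import numpy as np
    import cuqi

    target = cuqi.distribution.Gaussian(np.zeros(5), 1)   # hypothetical stand-in target

    sampler = cuqi.experimental.mcmc.MHNew(target)        # __init__ only stores parameters
    sampler.warmup(200)                                   # assumed entry point; _ensure_initialized() runs initialize() here
    sampler.sample(1000)
    sampler.save_checkpoint("mh_state.pickle")            # pickles the _STATE_KEYS entries (history is not saved)

    resumed = cuqi.experimental.mcmc.MHNew(target)
    resumed.load_checkpoint("mh_state.pickle")            # ensures initialization, then overwrites the state
    more = resumed.sample(1000).get_samples()

    sampler.reinitialize()                                # resets state and history, then calls initialize() again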