redback 1.0.0-py3-none-any.whl → 1.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- redback/__init__.py +4 -0
- redback/constraints.py +31 -25
- redback/ejecta_relations.py +8 -8
- redback/get_data/lasair.py +3 -4
- redback/get_data/swift.py +7 -7
- redback/interaction_processes.py +1 -4
- redback/likelihoods.py +207 -21
- redback/plotting.py +11 -11
- redback/priors/csm_interaction.prior +6 -7
- redback/priors/csm_nickel.prior +3 -3
- redback/priors/one_comp_kne_rosswog_heatingrate.prior +5 -0
- redback/priors/one_component_nsbh_ejecta_relation.prior +1 -1
- redback/priors/tde_analytical.prior +5 -5
- redback/priors/tde_analytical_bolometric.prior +6 -4
- redback/priors/tophat_from_emulator.prior +9 -0
- redback/priors/two_comp_kne_rosswog_heatingrate.prior +9 -0
- redback/priors/two_component_nsbh_ejecta_relation.prior +1 -1
- redback/priors/two_layer_stratified_kilonova.prior +1 -1
- redback/priors.py +11 -0
- redback/sed.py +194 -2
- redback/simulate_transients.py +61 -32
- redback/tables/filters.csv +15 -1
- redback/tables/ztf.tar.gz +0 -0
- redback/transient/afterglow.py +3 -2
- redback/transient/kilonova.py +1 -1
- redback/transient/supernova.py +1 -1
- redback/transient/tde.py +1 -1
- redback/transient/transient.py +2 -2
- redback/transient_models/afterglow_models.py +42 -0
- redback/transient_models/combined_models.py +47 -32
- redback/transient_models/extinction_models.py +12 -5
- redback/transient_models/kilonova_models.py +247 -14
- redback/transient_models/magnetar_driven_ejecta_models.py +2 -2
- redback/transient_models/phenomenological_models.py +13 -0
- redback/transient_models/supernova_models.py +50 -36
- redback/transient_models/tde_models.py +126 -1
- redback/utils.py +283 -6
- {redback-1.0.0.dist-info → redback-1.0.2.dist-info}/METADATA +7 -4
- {redback-1.0.0.dist-info → redback-1.0.2.dist-info}/RECORD +42 -40
- {redback-1.0.0.dist-info → redback-1.0.2.dist-info}/WHEEL +1 -1
- redback/tables/ztf_obslog.csv +0 -106649
- {redback-1.0.0.dist-info → redback-1.0.2.dist-info}/LICENCE.md +0 -0
- {redback-1.0.0.dist-info → redback-1.0.2.dist-info}/top_level.txt +0 -0
redback/likelihoods.py
CHANGED
@@ -3,11 +3,13 @@ from typing import Any, Union
 
 import bilby
 from scipy.special import gammaln
-
+from redback.utils import logger
+from bilby.core.prior import DeltaFunction, Constraint
 
 class _RedbackLikelihood(bilby.Likelihood):
 
-    def __init__(self, x: np.ndarray, y: np.ndarray, function: callable, kwargs: dict = None) -> None:
+    def __init__(self, x: np.ndarray, y: np.ndarray, function: callable, kwargs: dict = None, priors=None,
+                 fiducial_parameters=None) -> None:
         """
 
         :param x: The x values.
@@ -18,11 +20,19 @@ class _RedbackLikelihood(bilby.Likelihood):
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: Union[dict, None]
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
         """
         self.x = x
         self.y = y
         self.function = function
         self.kwargs = kwargs
+        self.priors = priors
+        self.fiducial_parameters = fiducial_parameters
 
         parameters = bilby.core.utils.introspection.infer_parameters_from_function(func=function)
         super().__init__(parameters=dict.fromkeys(parameters))
@@ -46,11 +56,81 @@ class _RedbackLikelihood(bilby.Likelihood):
         """
         return len(self.x)
 
+    @property
+    def parameters_to_be_updated(self):
+        if self.priors is None:
+            return None
+        else:
+            parameters_to_be_updated = [key for key in self.priors if not isinstance(
+                self.priors[key], (DeltaFunction, Constraint, float, int))]
+            return parameters_to_be_updated
+
+    def get_parameter_dictionary_from_list(self, parameter_list):
+        parameter_dictionary = dict(zip(self.parameters_to_be_updated, parameter_list))
+        excluded_parameter_keys = set(self.fiducial_parameters) - set(self.parameters_to_be_updated)
+        for key in excluded_parameter_keys:
+            parameter_dictionary[key] = self.fiducial_parameters[key]
+        return parameter_dictionary
+
+    def get_parameter_list_from_dictionary(self, parameter_dict):
+        return [parameter_dict[k] for k in self.parameters_to_be_updated]
+
+    def get_bounds_from_priors(self, priors):
+        bounds = []
+        for key in self.parameters_to_be_updated:
+            bounds.append([priors[key].minimum, priors[key].maximum])
+        return bounds
+
+    def lnlike_scipy_maximize(self, parameter_list):
+        self.parameters.update(self.get_parameter_dictionary_from_list(parameter_list))
+        return -self.log_likelihood()
+
+    def find_maximum_likelihood_parameters(self, iterations=5, maximization_kwargs=None, method='Nelder-Mead',
+                                           break_threshold=1e-3):
+        """
+        Estimate the maximum likelihood
+
+        :param iterations: Iterations to run the minimizer for before stopping. Default is 5.
+        :param maximization_kwargs: Any extra keyword arguments passed to the scipy minimize function
+        :param method: Minimize method to use. Default is 'Nelder-Mead'
+        :param break_threshold: The threshold for the difference in log likelihood to break the loop. Default is 1e-3.
+        :return: Dictionary of maximum likelihood parameters
+        """
+        from scipy.optimize import minimize
+        parameter_bounds = self.get_bounds_from_priors(self.priors)
+        if self.priors is None:
+            raise ValueError("Priors must be provided to use this functionality")
+        if maximization_kwargs is None:
+            maximization_kwargs = dict()
+        self.parameters.update(self.fiducial_parameters)
+        self.parameters["fiducial"] = 0
+        updated_parameters_list = self.get_parameter_list_from_dictionary(self.fiducial_parameters)
+        old_fiducial_ln_likelihood = self.log_likelihood()
+        for it in range(iterations):
+            logger.info(f"Optimizing fiducial parameters. Iteration : {it + 1}")
+            output = minimize(
+                self.lnlike_scipy_maximize,
+                x0=updated_parameters_list,
+                bounds=parameter_bounds,
+                method=method,
+                **maximization_kwargs,)
+            updated_parameters_list = output['x']
+            updated_parameters = self.get_parameter_dictionary_from_list(updated_parameters_list)
+            self.parameters.update(updated_parameters)
+            new_fiducial_ln_likelihood = self.log_likelihood_ratio()
+            logger.info(f"Current lnlikelihood: {new_fiducial_ln_likelihood:.2f}")
+            logger.info(f"Updated parameters: {updated_parameters}")
+            if new_fiducial_ln_likelihood - old_fiducial_ln_likelihood < break_threshold:
+                break
+            old_fiducial_ln_likelihood = new_fiducial_ln_likelihood
+        return updated_parameters
+
 
 class GaussianLikelihood(_RedbackLikelihood):
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma: Union[float, None, np.ndarray],
-            function: callable, kwargs: dict = None) -> None:
+            function: callable, kwargs: dict = None, priors=None,
+            fiducial_parameters=None) -> None:
         """A general Gaussian likelihood - the parameters are inferred from the arguments of function.
 
         :param x: The x values.
@@ -67,10 +147,17 @@ class GaussianLikelihood(_RedbackLikelihood):
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
         """
 
         self._noise_log_likelihood = None
-        super().__init__(x=x, y=y, function=function, kwargs=kwargs)
+        super().__init__(x=x, y=y, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
         self.sigma = sigma
         if self.sigma is None:
             self.parameters['sigma'] = None
@@ -121,7 +208,8 @@ class GaussianLikelihood(_RedbackLikelihood):
 class GaussianLikelihoodUniformXErrors(GaussianLikelihood):
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma: Union[float, None, np.ndarray],
-            bin_size: Union[float, None, np.ndarray], function: callable, kwargs: dict = None) -> None:
+            bin_size: Union[float, None, np.ndarray], function: callable, kwargs: dict = None, priors=None,
+            fiducial_parameters=None) -> None:
         """A general Gaussian likelihood with uniform errors in x- the parameters are inferred from the
         arguments of function. Takes into account the X errors with a Uniform likelihood between the
         bin high and bin low values. Note that the prior for the true x values must be uniform in this range!
@@ -142,9 +230,16 @@ class GaussianLikelihoodUniformXErrors(GaussianLikelihood):
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
         """
 
-        super().__init__(x=x, y=y, sigma=sigma, function=function, kwargs=kwargs)
+        super().__init__(x=x, y=y, sigma=sigma, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
         self.xerr = bin_size * np.ones(self.n)
 
     def noise_log_likelihood(self) -> float:
@@ -183,7 +278,7 @@ class GaussianLikelihoodUniformXErrors(GaussianLikelihood):
 class GaussianLikelihoodQuadratureNoise(GaussianLikelihood):
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma_i: Union[float, None, np.ndarray],
-            function: callable, kwargs: dict = None) -> None:
+            function: callable, kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
         """
         A general Gaussian likelihood - the parameters are inferred from the
         arguments of function
@@ -205,7 +300,8 @@ class GaussianLikelihoodQuadratureNoise(GaussianLikelihood):
         """
         self.sigma_i = sigma_i
         # These lines of code infer the parameters from the provided function
-        super().__init__(x=x, y=y, sigma=sigma_i, function=function, kwargs=kwargs)
+        super().__init__(x=x, y=y, sigma=sigma_i, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
 
     @property
     def full_sigma(self) -> Union[float, np.ndarray]:
@@ -231,19 +327,80 @@ class GaussianLikelihoodQuadratureNoise(GaussianLikelihood):
         """
         return np.nan_to_num(self._gaussian_log_likelihood(res=self.residual, sigma=self.full_sigma))
 
+class GaussianLikelihoodWithFractionalNoise(GaussianLikelihood):
+    def __init__(
+            self, x: np.ndarray, y: np.ndarray, sigma_i: Union[float, None, np.ndarray],
+            function: callable, kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
+        """
+        A Gaussian likelihood with noise that is proportional to the model.
+        The parameters are inferred from the arguments of function
+
+        :param x: The x values.
+        :type x: np.ndarray
+        :param y: The y values.
+        :type y: np.ndarray
+        :param sigma_i: The standard deviation of the noise. This is part of the full noise.
+            The sigma used in the likelihood is sigma = sqrt(sigma_i^2*model_y**2)
+        :type sigma_i: Union[float, None, np.ndarray]
+        :param function:
+            The python function to fit to the data. Note, this must take the
+            dependent variable as its first argument. The other arguments
+            will require a prior and will be sampled over (unless a fixed
+            value is given).
+        :type function: callable
+        :param kwargs: Any additional keywords for 'function'.
+        :type kwargs: dict
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
+        """
+        self.sigma_i = sigma_i
+        # These lines of code infer the parameters from the provided function
+        super().__init__(x=x, y=y, sigma=sigma_i, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
+
+    @property
+    def full_sigma(self) -> Union[float, np.ndarray]:
+        """
+        :return: The standard deviation of the full noise
+        :rtype: Union[float, np.ndarray]
+        """
+        model_y = self.function(self.x, **self.parameters, **self.kwargs)
+        return np.sqrt(self.sigma_i**2.*model_y**2)
+
+    def noise_log_likelihood(self) -> float:
+        """
+        :return: The noise log-likelihood, i.e. the log-likelihood assuming the signal is just noise.
+        :rtype: float
+        """
+        if self._noise_log_likelihood is None:
+            self._noise_log_likelihood = self._gaussian_log_likelihood(res=self.y, sigma=self.sigma_i)
+        return self._noise_log_likelihood
+
+    def log_likelihood(self) -> float:
+        """
+        :return: The log-likelihood.
+        :rtype: float
+        """
+        return np.nan_to_num(self._gaussian_log_likelihood(res=self.residual, sigma=self.full_sigma))
+
 class GaussianLikelihoodWithSystematicNoise(GaussianLikelihood):
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma_i: Union[float, None, np.ndarray],
-            function: callable, kwargs: dict = None) -> None:
+            function: callable, kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
         """
-        A
-        arguments of function
+        A Gaussian likelihood with a systematic noise term that is proportional to the model + some additive noise.
+        The parameters are inferred from the arguments of function
 
+        :param x: The x values.
         :type x: np.ndarray
         :param y: The y values.
         :type y: np.ndarray
         :param sigma_i: The standard deviation of the noise. This is part of the full noise.
-            The sigma used in the likelihood is sigma = sqrt(sigma_i^2 + sigma^2)
+            The sigma used in the likelihood is sigma = sqrt(sigma_i^2 + model_y**2*sigma^2)
         :type sigma_i: Union[float, None, np.ndarray]
         :param function:
            The python function to fit to the data. Note, this must take the
@@ -253,10 +410,17 @@ class GaussianLikelihoodWithSystematicNoise(GaussianLikelihood):
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
         """
         self.sigma_i = sigma_i
         # These lines of code infer the parameters from the provided function
-        super().__init__(x=x, y=y, sigma=sigma_i, function=function, kwargs=kwargs)
+        super().__init__(x=x, y=y, sigma=sigma_i, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
 
     @property
     def full_sigma(self) -> Union[float, np.ndarray]:
@@ -286,10 +450,11 @@ class GaussianLikelihoodWithSystematicNoise(GaussianLikelihood):
 class GaussianLikelihoodQuadratureNoiseNonDetections(GaussianLikelihoodQuadratureNoise):
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma_i: Union[float, np.ndarray], function: callable,
-            kwargs: dict = None, upperlimit_kwargs: dict = None) -> None:
+            kwargs: dict = None, upperlimit_kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
         """A general Gaussian likelihood - the parameters are inferred from the
         arguments of function. Takes into account non-detections with a Uniform likelihood for those points
 
+        :param x: The x values.
         :type x: np.ndarray
         :param y: The y values.
         :type y: np.ndarray
@@ -304,8 +469,15 @@ class GaussianLikelihoodQuadratureNoiseNonDetections(GaussianLikelihoodQuadratur
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
-
-
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
+        """
+        super().__init__(x=x, y=y, sigma_i=sigma_i, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
         self.upperlimit_kwargs = upperlimit_kwargs
 
     @property
@@ -345,7 +517,7 @@ class GRBGaussianLikelihood(GaussianLikelihood):
 
     def __init__(
             self, x: np.ndarray, y: np.ndarray, sigma: Union[float, np.ndarray],
-            function: callable, kwargs: dict = None) -> None:
+            function: callable, kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
         """A general Gaussian likelihood - the parameters are inferred from the
         arguments of function.
 
@@ -363,14 +535,21 @@ class GRBGaussianLikelihood(GaussianLikelihood):
         :type function: callable
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
         """
-        super().__init__(x=x, y=y, sigma=sigma, function=function, kwargs=kwargs)
+        super().__init__(x=x, y=y, sigma=sigma, function=function, kwargs=kwargs, priors=priors,
+                         fiducial_parameters=fiducial_parameters)
 
 
 class PoissonLikelihood(_RedbackLikelihood):
     def __init__(
             self, time: np.ndarray, counts: np.ndarray, function: callable, integrated_rate_function: bool = True,
-            dt: Union[float, np.ndarray] = None, kwargs: dict = None) -> None:
+            dt: Union[float, np.ndarray] = None, kwargs: dict = None, priors=None, fiducial_parameters=None) -> None:
         """
         :param time: The time values.
         :type time: np.ndarray
@@ -389,8 +568,15 @@ class PoissonLikelihood(_RedbackLikelihood):
         :type dt: Union[float, None, np.ndarray]
         :param kwargs: Any additional keywords for 'function'.
         :type kwargs: dict
-
-
+        :param priors: The priors for the parameters. Default to None if not provided.
+            Only necessary if using maximum likelihood estimation functionality.
+        :type priors: Union[dict, None]
+        :param fiducial_parameters: The starting guesses for model parameters to
+            use in the optimization for maximum likelihood estimation. Default to None if not provided.
+        :type fiducial_parameters: Union[dict, None]
+        """
+        super(PoissonLikelihood, self).__init__(x=time, y=counts, function=function, kwargs=kwargs, priors=priors,
+                                                fiducial_parameters=fiducial_parameters)
        self.integrated_rate_function = integrated_rate_function
        self.dt = dt
        self.parameters['background_rate'] = 0
redback/plotting.py
CHANGED
@@ -346,7 +346,7 @@ class IntegratedFluxPlotter(Plotter):
             axes.plot(times, ys, color=self.random_sample_color, alpha=self.random_sample_alpha, lw=self.linewidth,
                       zorder=self.zorder)
         elif self.uncertainty_mode == "credible_intervals":
-            lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=random_ys_list)
+            lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=random_ys_list, interval=self.credible_interval_level)
             axes.fill_between(
                 times, lower_bound, upper_bound, alpha=self.uncertainty_band_alpha, color=self.max_likelihood_color)
 
@@ -391,11 +391,11 @@ class LuminosityPlotter(IntegratedFluxPlotter):
 
 class MagnitudePlotter(Plotter):
 
-    xlim_low_phase_model_multiplier = 0.9
-    xlim_high_phase_model_multiplier = 1.1
-    xlim_high_multiplier = 1.2
-    ylim_low_magnitude_multiplier = 0.8
-    ylim_high_magnitude_multiplier = 1.2
+    xlim_low_phase_model_multiplier = KwargsAccessorWithDefault("xlim_low_multiplier", 0.9)
+    xlim_high_phase_model_multiplier = KwargsAccessorWithDefault("xlim_high_multiplier", 1.1)
+    xlim_high_multiplier = KwargsAccessorWithDefault("xlim_high_multiplier", 1.2)
+    ylim_low_magnitude_multiplier = KwargsAccessorWithDefault("ylim_low_multiplier", 0.8)
+    ylim_high_magnitude_multiplier = KwargsAccessorWithDefault("ylim_high_multiplier", 1.2)
     ncols = KwargsAccessorWithDefault("ncols", 2)
 
     @property
@@ -551,7 +551,7 @@ class MagnitudePlotter(Plotter):
         elif self.band_scaling.get("type") == '+':
             ax.errorbar(
                 self.transient.x[indices] - self._reference_mjd_date, self.transient.y[indices] + self.band_scaling.get(band),
-                xerr=self._get_x_err(indices), yerr=self.transient.y_err[indices]
+                xerr=self._get_x_err(indices), yerr=self.transient.y_err[indices],
                 fmt=self.errorbar_fmt, ms=self.ms, color=color,
                 elinewidth=self.elinewidth, capsize=self.capsize, label=label)
         else:
@@ -635,11 +635,11 @@ class MagnitudePlotter(Plotter):
         elif self.uncertainty_mode == "credible_intervals":
             if band in self.band_scaling:
                 if self.band_scaling.get("type") == 'x':
-                    lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list) * self.band_scaling.get(band))
+                    lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list) * self.band_scaling.get(band), interval=self.credible_interval_level)
                 elif self.band_scaling.get("type") == '+':
-                    lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list) + self.band_scaling.get(band))
+                    lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list) + self.band_scaling.get(band), interval=self.credible_interval_level)
             else:
-                lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list))
+                lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=np.array(random_ys_list), interval=self.credible_interval_level)
             axes.fill_between(
                 times - self._reference_mjd_date, lower_bound, upper_bound,
                 alpha=self.uncertainty_band_alpha, color=color_sample)
@@ -789,7 +789,7 @@ class MagnitudePlotter(Plotter):
             axes[ii].plot(times - self._reference_mjd_date, random_ys, color=color_sample,
                           alpha=self.random_sample_alpha, lw=self.linewidth, zorder=self.zorder)
         elif self.uncertainty_mode == "credible_intervals":
-            lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=random_ys_list)
+            lower_bound, upper_bound, _ = redback.utils.calc_credible_intervals(samples=random_ys_list, interval=self.credible_interval_level)
             axes[ii].fill_between(
                 times - self._reference_mjd_date, lower_bound, upper_bound,
                 alpha=self.uncertainty_band_alpha, color=color_sample)
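
The plotting changes thread a `credible_interval_level` setting into every `calc_credible_intervals` call. A short sketch of the utility call the plotters now delegate to; the posterior sample array is made up for illustration.

```python
# Illustrative sketch: calc_credible_intervals now receives the plotter's
# credible_interval_level via its `interval` argument. Samples here are synthetic.
import numpy as np
from redback.utils import calc_credible_intervals

# 200 posterior light-curve draws evaluated at 50 times (samples along axis 0).
random_ys = np.random.lognormal(mean=0.0, sigma=0.3, size=(200, 50))
lower_bound, upper_bound, _ = calc_credible_intervals(samples=random_ys, interval=0.9)
```

The `MagnitudePlotter` axis-limit multipliers are now `KwargsAccessorWithDefault` descriptors as well, so values such as `xlim_high_multiplier` can presumably be overridden through the usual plotting kwargs rather than by subclassing.
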
redback/priors/csm_interaction.prior
CHANGED
@@ -1,10 +1,9 @@
 redshift = Uniform(1e-3,3,name='redshift', latex_label = r'$z$')
-mej = LogUniform(
-csm_mass = LogUniform(
+mej = LogUniform(1, 100, 'mej', latex_label = r'$M_{\mathrm{ej} }~(M_\odot)$')
+csm_mass = LogUniform(1, 100, 'csm_mass', latex_label = r'$M_{\mathrm{csm}}~(M_\odot)$')
 vej = LogUniform(1e3, 1e5, 'vej', latex_label = r'$v_{\mathrm{ej}}~(\mathrm{km}/\mathrm{s})$')
-eta = Uniform(0,
-rho =
-r0 = Uniform(
+eta = Uniform(0, 2, '\\eta', latex_label = r'$\\eta$')
+rho = LogUniform(1e-14, 1e-12, 'rho', latex_label = r'$\\rho$')
+r0 = Uniform(50, 700, 'r0', latex_label=r'$r_0~({\mathrm{AU}})$')
 kappa = Uniform(0.05, 2, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
-
-temperature_floor = LogUniform(1e3,1e5,name = 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
+temperature_floor = LogUniform(100,1e4,name = 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
redback/priors/csm_nickel.prior
CHANGED
@@ -3,9 +3,9 @@ mej = LogUniform(1e-4, 100, 'mej', latex_label = r'$M_{\mathrm{ej} }~(M_\odot)$'
 f_nickel = LogUniform(1e-3,1,name='f_nickel', latex_label = r'$f_{\mathrm{Ni}}$')
 csm_mass = LogUniform(1e-4, 100, 'csm_mass', latex_label = r'$M_{\mathrm{csm}}~(M_\odot)$')
 ek = LogUniform(1e48, 1e52, 'ek', latex_label = r'$E_{\mathrm{kin}}~(\mathrm{ erg})$')
-eta = Uniform(0,
-rho =
+eta = Uniform(0, 2, '\\eta', latex_label = r'$\\eta$')
+rho = LogUniform(1e-15, 1e-12, 'rho', latex_label = r'$\\rho$')
 r0 = Uniform(4, 8, 'r0', latex_label=r'$r_0~({\mathrm{AU}})$')
 kappa = Uniform(0.05, 2, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
 kappa_gamma = LogUniform(1e-4, 1e4, 'kappa_gamma', latex_label = r'$\\kappa_{\\gamma}~(\mathrm{cm}^{2}/\mathrm{g})$')
-temperature_floor = LogUniform(
+temperature_floor = LogUniform(100,1e4,name = 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
redback/priors/one_comp_kne_rosswog_heatingrate.prior
ADDED
@@ -0,0 +1,5 @@
+redshift = Uniform(1e-6, 0.1, 'redshift', latex_label = r'$z$')
+mej = Uniform(1e-2, 0.05, 'mej', latex_label = r'$M_{\mathrm{ej} }~(M_\odot)$')
+vej = Uniform(0.05, 0.3, 'vej', latex_label = r'$v_{\mathrm{ej}}~(c)$')
+ye = Uniform(0.05, 0.4, 'ye', latex_label = r'$Y_{e}$')
+temperature_floor = LogUniform(100, 6000, 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
redback/priors/one_component_nsbh_ejecta_relation.prior
CHANGED
@@ -1,7 +1,7 @@
 redshift = Uniform(1e-3, 0.1, 'redshift', latex_label = r'$z$')
 mass_bh = Uniform(2.5, 35, 'mass_bh', latex_label = r'$M_1~(M_\odot)$')
 mass_ns = Uniform(1.1, 2.1, 'mass_ns', latex_label = r'$M_2~(M_\odot)$')
-
+chi_bh = Uniform(0, 0.99, 'chi_bh', latex_label = r'$\chi_{\mathrm{bh}}$')
 lambda_ns = Uniform(0, 5000, 'lambda_2', latex_label = r'$\Lambda_{\mathrm{NS}}$')
 kappa = Uniform(1, 30, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
 temperature_floor = LogUniform(100, 6000, 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
redback/priors/tde_analytical.prior
CHANGED
@@ -1,8 +1,8 @@
 redshift = Uniform(0.01, 3, 'redshift', latex_label=r'$z$')
-mej = LogUniform(
+mej = LogUniform(0.1, 100, 'mej', latex_label = r'$M_{\mathrm{ej} }~(M_\odot)$')
 vej = LogUniform(1e3, 1e5, 'vej', latex_label = r'$v_{\mathrm{ej}}~(\mathrm{km}/\mathrm{s})$')
-kappa = Uniform(0.05,
+kappa = Uniform(0.05, 1e2, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
 kappa_gamma = LogUniform(1e-4, 1e4, 'kappa_gamma', latex_label = r'$\\kappa_{\\gamma}~(\mathrm{cm}^{2}/\mathrm{g})$')
-temperature_floor = LogUniform(1e3,
-l0 = LogUniform(1e51,
-t_0_turn = LogUniform(1e-4,
+temperature_floor = LogUniform(1e3,1e4,name = 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
+l0 = LogUniform(1e51, 1e60, "l0", latex_label="$l_0$")
+t_0_turn = LogUniform(1e-4, 5e2, "t_0_turn", latex_label="$t_{0 t}$")
redback/priors/tde_analytical_bolometric.prior
CHANGED
@@ -1,6 +1,8 @@
-
+redshift = Uniform(0.01, 3, 'redshift', latex_label=r'$z$')
+mej = LogUniform(0.1, 100, 'mej', latex_label = r'$M_{\mathrm{ej} }~(M_\odot)$')
 vej = LogUniform(1e3, 1e5, 'vej', latex_label = r'$v_{\mathrm{ej}}~(\mathrm{km}/\mathrm{s})$')
-kappa = Uniform(0.05,
+kappa = Uniform(0.05, 1e2, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
 kappa_gamma = LogUniform(1e-4, 1e4, 'kappa_gamma', latex_label = r'$\\kappa_{\\gamma}~(\mathrm{cm}^{2}/\mathrm{g})$')
-
-
+temperature_floor = LogUniform(1e3,1e4,name = 'temperature_floor', latex_label = r'$T_{\mathrm{floor}}~(\mathrm{K})$')
+l0 = LogUniform(1e51, 1e60, "l0", latex_label="$l_0$")
+t_0_turn = LogUniform(1e-4, 5e2, "t_0_turn", latex_label="$t_{0 t}$")
redback/priors/tophat_from_emulator.prior
ADDED
@@ -0,0 +1,9 @@
+redshift = Uniform(0.01, 3, 'redshift', latex_label=r'$z$')
+thv = Sine(name='thv', maximum=np.pi/2, latex_label=r'$\\theta_{\mathrm{observer}}~(\mathrm{rad})$')
+loge0 = Uniform(44, 54, 'loge0', latex_label=r'$\log_{10}~E_{0} / {\mathrm{erg}}$')
+thc = Uniform(0.01, 0.1, 'thc', latex_label=r'$\\theta_{\mathrm{core}}~({\mathrm{rad}})$')
+logn0 = Uniform(-5, 2, 'logn0', latex_label=r'$\log_{10}~n_{\mathrm{ism}} / {\mathrm{cm}}^{-3}$')
+p = Uniform(2, 3, 'p', latex_label=r'$p$')
+logepse = Uniform(-5, 0, 'logepse', latex_label=r'$\log_{10}~\\epsilon_{e}$')
+logepsb = Uniform(-5, 0, 'logepsb', latex_label=r'$\log_{10}~\\epsilon_{B}$')
+g0 = Uniform(100,2000, 'g0', latex_label=r'$\\Gamma_{0}$')
redback/priors/two_comp_kne_rosswog_heatingrate.prior
ADDED
@@ -0,0 +1,9 @@
+redshift = Uniform(1e-6, 0.1, 'redshift', latex_label = r'$z$')
+mej_1 = Uniform(1e-2, 0.05, 'mej', latex_label = r'$M_{\mathrm{ej}~1}~(M_\odot)$')
+vej_1 = Uniform(0.05, 0.3, 'vej', latex_label = r'$v_{\mathrm{ej}~1}~(c)$')
+ye_1 = Uniform(0.05, 0.4, 'ye', latex_label = r'$Y_{e}~1$')
+temperature_floor_1 = LogUniform(100, 6000, 'temperature_floor', latex_label = r'$T_{\mathrm{floor}~1}~(\mathrm{K})$')
+mej_2 = Uniform(1e-2, 0.05, 'mej', latex_label = r'$M_{\mathrm{ej}~2}~(M_\odot)$')
+vej_2 = Uniform(0.05, 0.3, 'vej', latex_label = r'$v_{\mathrm{ej}~2}~(c)$')
+ye_2 = Uniform(0.05, 0.4, 'ye', latex_label = r'$Y_{e}~2$')
+temperature_floor_2 = LogUniform(100, 6000, 'temperature_floor', latex_label = r'$T_{\mathrm{floor}~2}~(\mathrm{K})$')
redback/priors/two_component_nsbh_ejecta_relation.prior
CHANGED
@@ -1,7 +1,7 @@
 redshift = Uniform(1e-3, 0.1, 'redshift', latex_label = r'$z$')
 mass_bh = Uniform(2.5, 35, 'mass_bh', latex_label = r'$M(\mathrm{BH})~(M_\odot)$')
 mass_ns = Uniform(1.1, 2.1, 'mass_ns', latex_label = r'$M(\mathrm{NS})~(M_\odot)$')
-
+chi_bh = Uniform(0, 0.99, 'chi_eff', latex_label = r'$\\chi_{\mathrm{bh}}$')
 lambda_ns = Uniform(0, 5000, 'lambda_2', latex_label = r'$\\Lambda_{\mathrm{NS}}$')
 zeta = Uniform(0.05, 0.6, 'zeta', latex_label = r'$\\zeta$')
 vej_2 = Uniform(0.01, 0.3, 'vej', latex_label = r'$v_{\mathrm{ej}~2}~(c)$')
redback/priors/two_layer_stratified_kilonova.prior
CHANGED
@@ -3,4 +3,4 @@ mej = Uniform(1e-2, 0.05, 'mej', latex_label = r'$M_{\mathrm{ej}}~(M_\odot)$')
 vej_1 = Uniform(0.05, 0.2, 'vej_1', latex_label = r'$v_{\mathrm{ej}~1}~(c)$')
 vej_2 = Uniform(0.3, 0.5, 'vej_2', latex_label = r'$v_{\mathrm{ej}~2}~(c)$')
 kappa = Uniform(1, 30, 'kappa', latex_label = r'$\\kappa~(\mathrm{cm}^{2}/\mathrm{g})$')
-beta = Uniform(1
+beta = Uniform(3.1, 8, 'beta', latex_label = r'$\\beta$')
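
The `.prior` files above are bilby-style prior definitions. A minimal loading sketch follows, assuming a local path to one of the files; in normal use `redback.priors.get_priors` resolves the file for you.

```python
# Illustrative sketch: loading a redback .prior file directly into a bilby PriorDict.
# The relative path is an assumption about where the file sits in your checkout.
from bilby.core.prior import PriorDict

priors = PriorDict(filename='redback/priors/two_layer_stratified_kilonova.prior')
print(priors['beta'])  # Uniform between 3.1 and 8 after this release, per the diff above
```
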
redback/priors.py
CHANGED
@@ -9,6 +9,17 @@ from redback.utils import logger
 
 
 def get_priors(model, times=None, y=None, yerr=None, dt=None, **kwargs):
+    """
+    Get the prior for the given model. If the model is a prompt model, the times, y, and yerr must be provided.
+
+    :param model: String referring to a name of a model implemented in Redback.
+    :param times: Time array
+    :param y: Y values, arbitrary units
+    :param yerr: Error on y values, arbitrary units
+    :param dt: time interval
+    :param kwargs: Extra arguments to be passed to the prior function
+    :return: priors: PriorDict object
+    """
     prompt_prior_functions = dict(gaussian=get_gaussian_priors, skew_gaussian=get_skew_gaussian_priors,
                                   skew_exponential=get_skew_exponential_priors, fred=get_fred_priors,
                                   fred_extended=get_fred_extended_priors)