redback 1.0.31__py3-none-any.whl → 1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. redback/__init__.py +3 -2
  2. redback/analysis.py +321 -4
  3. redback/filters.py +57 -23
  4. redback/get_data/directory.py +18 -0
  5. redback/likelihoods.py +260 -0
  6. redback/model_library.py +12 -2
  7. redback/plotting.py +335 -4
  8. redback/priors/blackbody_spectrum_with_absorption_and_emission_lines.prior +9 -0
  9. redback/priors/csm_shock_and_arnett_two_rphots.prior +11 -0
  10. redback/priors/exp_rise_powerlaw_decline.prior +6 -0
  11. redback/priors/powerlaw_spectrum_with_absorption_and_emission_lines.prior +8 -0
  12. redback/priors/shockcooling_morag.prior +6 -0
  13. redback/priors/shockcooling_morag_and_arnett.prior +10 -0
  14. redback/priors/shockcooling_morag_and_arnett_bolometric.prior +9 -0
  15. redback/priors/shockcooling_morag_bolometric.prior +5 -0
  16. redback/priors/shockcooling_sapirandwaxman.prior +6 -0
  17. redback/priors/shockcooling_sapirandwaxman_bolometric.prior +5 -0
  18. redback/priors/shockcooling_sapirwaxman_and_arnett.prior +10 -0
  19. redback/priors/shockcooling_sapirwaxman_and_arnett_bolometric.prior +9 -0
  20. redback/priors/shocked_cocoon_and_arnett.prior +13 -0
  21. redback/priors/synchrotron_ism.prior +6 -0
  22. redback/priors/synchrotron_massloss.prior +6 -0
  23. redback/priors/synchrotron_pldensity.prior +7 -0
  24. redback/priors/thermal_synchrotron_v2_fluxdensity.prior +8 -0
  25. redback/priors/thermal_synchrotron_v2_lnu.prior +7 -0
  26. redback/priors.py +10 -3
  27. redback/result.py +9 -1
  28. redback/sampler.py +46 -4
  29. redback/sed.py +48 -1
  30. redback/simulate_transients.py +5 -1
  31. redback/tables/filters.csv +265 -254
  32. redback/transient/__init__.py +2 -3
  33. redback/transient/transient.py +648 -10
  34. redback/transient_models/__init__.py +3 -2
  35. redback/transient_models/extinction_models.py +3 -2
  36. redback/transient_models/gaussianprocess_models.py +45 -0
  37. redback/transient_models/general_synchrotron_models.py +296 -6
  38. redback/transient_models/phenomenological_models.py +154 -7
  39. redback/transient_models/shock_powered_models.py +503 -40
  40. redback/transient_models/spectral_models.py +82 -0
  41. redback/transient_models/supernova_models.py +333 -7
  42. redback/transient_models/tde_models.py +57 -41
  43. redback/utils.py +302 -51
  44. {redback-1.0.31.dist-info → redback-1.1.dist-info}/METADATA +8 -6
  45. {redback-1.0.31.dist-info → redback-1.1.dist-info}/RECORD +48 -29
  46. {redback-1.0.31.dist-info → redback-1.1.dist-info}/WHEEL +1 -1
  47. {redback-1.0.31.dist-info → redback-1.1.dist-info/licenses}/LICENCE.md +0 -0
  48. {redback-1.0.31.dist-info → redback-1.1.dist-info}/top_level.txt +0 -0
redback/__init__.py CHANGED
@@ -1,8 +1,9 @@
  from redback import analysis, constants, get_data, redback_errors, priors, result, sampler, transient, \
- transient_models, utils, photosphere, sed, interaction_processes, constraints, plotting, model_library, simulate_transients
+ transient_models, utils, photosphere, sed, interaction_processes, constraints, plotting, model_library, \
+ simulate_transients
  from redback.transient import afterglow, kilonova, prompt, supernova, tde
  from redback.sampler import fit_model
  from redback.utils import setup_logger
 
- __version__ = "1.0.31"
+ __version__ = "1.1.0"
  setup_logger(log_level='info')
redback/analysis.py CHANGED
@@ -4,7 +4,7 @@ import numpy as np
  import pandas as pd
 
  import redback.model_library
- from redback.utils import logger, find_nearest
+ from redback.utils import logger, find_nearest, bands_to_frequency
  from redback.result import RedbackResult
  from redback.constants import day_to_s
  import matplotlib
@@ -97,9 +97,9 @@ def plot_evolution_parameters(result, random_models=100):
  nn = output.nn
  mu = output.mu
  alpha = output.alpha
- ax[0].plot(time, nn, "--", lw=1, color='red', alpha=2.5, zorder=-1)
- ax[1].plot(time, np.rad2deg(alpha), "--", lw=1, color='red', alpha=2.5, zorder=-1)
- ax[2].plot(time, mu, "--", lw=1, color='red', alpha=2.5, zorder=-1)
+ ax[0].plot(time, nn, "--", lw=1, color='red', zorder=-1)
+ ax[1].plot(time, np.rad2deg(alpha), "--", lw=1, color='red', zorder=-1)
+ ax[2].plot(time, mu, "--", lw=1, color='red', zorder=-1)
  ax[0].set_ylabel('braking index')
  ax[1].set_ylabel('inclination angle')
  ax[2].set_ylabel('magnetic moment')
@@ -151,3 +151,320 @@ def plot_spectrum(model, parameters, time_to_plot, axes=None, **kwargs):
  ax.set_ylabel(r'Flux ($10^{-17}$ erg s$^{-1}$ cm$^{-2}$ $\mathrm{\AA}$)')
  ax.legend(loc='upper left')
  return ax
+
+ def plot_gp_lightcurves(transient, gp_output, axes=None, band_colors=None, band_scaling=None):
+ """
+ Plot the Gaussian Process lightcurves
+
+ :param transient: A transient object
+ :param gp_output: The output of the fit_gp function
+ :param axes: axes, ideally you should be passing the axes from the plot_data methods
+ :param band_colors: a dictionary of band colors; again ideally you should be passing the band_colors from the plot_data methods
+ :return: axes object with the GP lightcurves plotted
+ """
+ ax = axes or plt.gca()
+
+ if transient.use_phase_model:
+ ref_date = transient.x[0]
+ else:
+ ref_date = 0
+
+ t_new = np.linspace(transient.x.min() - 10, transient.x.max() + 20, 100)
+
+ if transient.data_mode in ['flux_density', 'flux', 'magnitude']:
+ if band_colors is None:
+ band_colors = dict(zip(transient.unique_bands, plt.cm.tab20(range(len(transient.unique_bands)))))
+ else:
+ band_colors = band_colors
+ if gp_output.use_frequency:
+ for band in transient.unique_bands:
+ if band_scaling:
+ scaling = band_scaling[band]
+ else:
+ scaling = 0
+ f_new = np.ones_like(t_new) * bands_to_frequency([band])
+ X_new = np.column_stack((f_new, t_new))
+ gp = gp_output.gp
+ y_pred, y_cov = gp.predict(gp_output.scaled_y, X_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_lower = y_pred - 0.5 * y_std
+ y_upper = y_pred + 0.5 * y_std
+ ax.plot(t_new - ref_date, (y_pred * gp_output.y_scaler) + scaling, color=band_colors[band])
+ ax.fill_between(t_new - ref_date, (y_lower * gp_output.y_scaler) + scaling,
+ (y_upper * gp_output.y_scaler) + scaling, alpha=0.5,
+ color=band_colors[band])
+ else:
+ for band in transient.unique_bands:
+ if band_scaling:
+ scaling = band_scaling[band]
+ else:
+ scaling = 0
+ gp = gp_output.gp[band]
+ y_pred, y_cov = gp.predict(gp_output.scaled_y[band], t_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_lower = y_pred - 0.5 * y_std
+ y_upper = y_pred + 0.5 * y_std
+ ax.plot(t_new - ref_date, (y_pred * gp_output.y_scaler) + scaling, color=band_colors[band])
+ ax.fill_between(t_new - ref_date, (y_lower * gp_output.y_scaler) + scaling,
+ (y_upper * gp_output.y_scaler) + scaling, alpha=0.5,
+ color=band_colors[band])
+ else:
+ y_pred, y_cov = gp_output.gp.predict(gp_output.scaled_y, t_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_lower = y_pred - 0.5 * y_std
+ y_upper = y_pred + 0.5 * y_std
+
+ ax.plot(t_new, y_pred * gp_output.y_scaler, color='red')
+ ax.fill_between(t_new, y_lower * gp_output.y_scaler, y_upper * gp_output.y_scaler, alpha=0.5, color='red')
+ return ax
+
+ def fit_temperature_and_radius_gp(data, kernelT, kernelR, plot=False, **kwargs):
+ """
+ Fit a Gaussian Process to the temperature and radius data
+
+ :param data: DataFrame containing the temperature and radius data output of the transient.estimate_bb_params method.
+ :param kernelT: george kernel for the temperature
+ :param kernelR: george kernel for the radius
+ :param plot: Whether to make a two-panel plot of the temperature and radius GP evolution and the data
+ :param kwargs: Additional keyword arguments
+ :param inflate_errors: If True, inflate the errors by 20%, default is False
+ :return: Temperature and radius GP objects and plot fig and axes if requested
+ """
+ import george
+ from scipy.optimize import minimize
+
+ temperature = data['temperature']
+ radius = data['radius']
+ t_data = data['epoch_times']
+ T_err = data['temp_err']
+ R_err = data['radius_err']
+ inflate_errors = kwargs.get('inflate_errors', True)
+ if inflate_errors:
+ error = kwargs.get('error', 1.5)
+ else:
+ error = 1
+ gp_T_err_raw = T_err * error
+ gp_R_err = R_err * error
+
+ fit_in_log = kwargs.get("fit_in_log", False)
+ if fit_in_log:
+ # In log space, use: log10(T); propagate errors via: δ(log10T)=δT/(T*ln(10))
+ temperature_fit = np.log10(temperature)
+ gp_T_err = gp_T_err_raw / (temperature * np.log(10))
+ else:
+ temperature_fit = temperature
+ gp_T_err = gp_T_err_raw
+
+ gp_T = george.GP(kernelT)
+ gp_T.compute(t_data, gp_T_err + 1e-8)
+
+ def neg_ln_like_T(p):
+ gp_T.set_parameter_vector(p)
+ return -gp_T.log_likelihood(temperature_fit)
+
+ def grad_neg_ln_like_T(p):
+ gp_T.set_parameter_vector(p)
+ return -gp_T.grad_log_likelihood(temperature_fit)
+
+ p0_T = gp_T.get_parameter_vector()
+ result_T = minimize(neg_ln_like_T, p0_T, jac=grad_neg_ln_like_T)
+ gp_T.set_parameter_vector(result_T.x)
+
+ logger.info("Finished GP fit for temperature")
+ logger.info(f"GP final parameters: {gp_T.get_parameter_dict()}")
+
+ gp_R = george.GP(kernelR)
+ gp_R.compute(t_data, gp_R_err + 1e-8)
+
+ def neg_ln_like_R(p):
+ gp_R.set_parameter_vector(p)
+ return -gp_R.log_likelihood(radius)
+
+ def grad_neg_ln_like_R(p):
+ gp_R.set_parameter_vector(p)
+ return -gp_R.grad_log_likelihood(radius)
+
+ p0_R = gp_R.get_parameter_vector()
+ result_R = minimize(neg_ln_like_R, p0_R, jac=grad_neg_ln_like_R)
+ gp_R.set_parameter_vector(result_R.x)
+
+ logger.info("Finished GP fit for radius")
+ logger.info(f"GP final parameters: {gp_R.get_parameter_dict()}")
+
+ if plot:
+ sigma_to_plot = kwargs.get('sigma_to_plot', 1)
+ label = r"${}\sigma$ GP uncertainty".format(str(int(sigma_to_plot)))
+ t_pred = np.linspace(t_data.min(), t_data.max(), 100)
+ # Temperature prediction
+ T_pred, T_pred_var = gp_T.predict(temperature_fit, t_pred, return_var=True)
+ T_pred_std = np.sqrt(T_pred_var)
+
+ # If fitting in log space, convert the prediction back to linear units.
+ if fit_in_log:
+ T_pred_lin = 10**T_pred
+ # Propagate the uncertainty approximately: dT ≈ 10^x * ln(10) * sigma_x.
+ T_pred_std_lin = 10**T_pred * np.log(10) * T_pred_std
+ else:
+ T_pred_lin = T_pred
+ T_pred_std_lin = T_pred_std
+
+ # Radius prediction
+ R_pred, R_pred_var = gp_R.predict(radius, t_pred, return_var=True)
+ R_pred_std = np.sqrt(R_pred_var)
+
+ fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(7, 3))
+ ax1.errorbar(t_data, temperature, yerr=T_err, fmt='o', label='Data', color='blue')
+ ax1.plot(t_pred, T_pred_lin, label='GP Prediction', color='red')
+ ax1.fill_between(t_pred, T_pred_lin - sigma_to_plot*T_pred_std_lin, T_pred_lin + sigma_to_plot*T_pred_std_lin,
+ alpha=0.2, color='red', label=label)
+ ax2.errorbar(t_data, radius, yerr=R_err, fmt='o', label='Data', color='blue')
+ ax2.plot(t_pred, R_pred, label='GP Prediction', color='red')
+ ax2.fill_between(t_pred, R_pred - sigma_to_plot*R_pred_std, R_pred + sigma_to_plot*R_pred_std, alpha=0.2, color='red',
+ label=label)
+
+ ax1.set_xlabel("Time", fontsize=15)
+ ax1.set_ylabel("Temperature [K]", fontsize=15)
+ ax1.set_title("Temperature Evolution", fontsize=15)
+ ax2.set_xlabel("Time", fontsize=15)
+ ax2.set_ylabel("Radius [cm]", fontsize=15)
+ ax2.set_title("Radius Evolution from GP", fontsize=15)
+
+ ax1.set_yscale('log')
+ ax2.set_yscale('log')
+
+ ax1.legend()
+ ax2.legend()
+ plt.subplots_adjust(wspace=0.3)
+ return gp_T, gp_R, fig, (ax1, ax2)
+ else:
+ return gp_T, gp_R
+
+ def generate_new_transient_data_from_gp(gp_out, t_new, transient, **kwargs):
+ """
+ Generates new transient data based on Gaussian Process (GP) predictions for the given time array
+ and transient object. Depending on the data mode of the transient object
+ (e.g., 'flux_density', 'flux', 'magnitude', or 'luminosity'), this function updates the data
+ accordingly, adjusting errors and scaling by frequency if necessary.
+
+ :param gp_out: The GP output object containing the Gaussian Process model, scaled data,
+ and other related attributes.
+ :type gp_out: object
+ :param t_new: Array of new time values for which GP predictions are to be generated.
+ :type t_new: array-like
+ :param transient: The transient object containing the original observation data and related
+ properties such as data mode and unique frequencies or bands.
+ :type transient: object
+ :param kwargs: Additional parameters to modify behavior, such as:
+
+ - **inflate_y_err** (bool): Flag to indicate whether to inflate GP errors.
+ - **error** (float): Multiplier for adjusting GP error inflation.
+
+ :return: A new transient object with data updated using GP predictions.
+ :rtype: object
+ """
+ data_mode = transient.data_mode
+ logger.info(f"Data mode: {data_mode}")
+ logger.info("Creating new {} data".format(data_mode))
+
+ if data_mode not in ['flux_density', 'flux', 'magnitude', 'luminosity']:
+ raise ValueError("Data mode {} not understood".format(data_mode))
+
+ if kwargs.get('inflate_y_err', True):
+ error = kwargs.get('error', 10)
+ else:
+ logger.info("Using GP predicted errors, this is likely being too conservative")
+ error = 1.
+
+ if gp_out.use_frequency:
+ logger.info("GP is a 2D kernel with effective frequency")
+ freqs = transient.unique_frequencies
+ T, F = np.meshgrid(t_new, freqs)
+ try:
+ bands = redback.utils.frequency_to_bandname(F.flatten())
+ except Exception:
+ bands = F.flatten().astype(str)
+ X_new = np.column_stack((F.flatten(), T.flatten()))
+ y_pred, y_var = gp_out.gp.predict(gp_out.scaled_y, X_new, return_var=True)
+ y_std = np.sqrt(y_var)
+ y_err = y_std * error
+ y_pred = y_pred * gp_out.y_scaler
+ tts = T.flatten()
+ freqs = F.flatten()
+ else:
+ logger.info("GP is a 1D kernel")
+ if data_mode == 'flux_density':
+ logger.warning("Bandnames/frequency attributes for the transient object may be weird, "
+ "Please check for yourself")
+ tts = []
+ ys = []
+ yerrs = []
+ bbs = []
+ for key in gp_out.gp.keys():
+ gp = gp_out.gp[key]
+ y_pred, y_cov = gp.predict(gp_out.scaled_y[key], t_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_err = y_std * error
+ y_pred = y_pred * gp_out.y_scaler
+ _bands = np.repeat(key, len(t_new))
+ bbs.append(key)
+ tts.append(t_new)
+ ys.append(y_pred)
+ yerrs.append(y_err)
+ temp_frame = pd.DataFrame({'time': tts, 'ys': ys, 'yerr': yerrs, 'band': bbs})
+ temp_frame.sort_values('time', inplace=True)
+ y_pred = temp_frame['ys']
+ y_err = temp_frame['yerr']
+ bands = temp_frame['band']
+ freqs = temp_frame['band']
+ tts = temp_frame['time']
+ elif data_mode in ['flux', 'magnitude']:
+ tts = []
+ ys = []
+ yerrs = []
+ bbs = []
+ for band in transient.unique_bands:
+ gp = gp_out.gp[band]
+ y_pred, y_cov = gp.predict(gp_out.scaled_y[band], t_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_err = y_std * error
+ y_pred = y_pred * gp_out.y_scaler
+ _bands = np.repeat(band, len(t_new))
+ bbs.append(_bands)
+ tts.append(t_new)
+ ys.append(y_pred)
+ yerrs.append(y_err)
+ temp_frame = pd.DataFrame({'time':tts, 'ys':ys, 'yerr':yerrs, 'band':bbs})
+ temp_frame.sort_values('time', inplace=True)
+ y_pred = temp_frame['ys']
+ y_err = temp_frame['yerr']
+ bands = temp_frame['band']
+ tts = temp_frame['time']
+ elif data_mode == 'luminosity':
+ y_pred, y_cov = gp_out.gp.predict(gp_out.scaled_y, t_new, return_cov=True)
+ y_std = np.sqrt(np.diag(y_cov))
+ y_err = y_std * error
+ y_pred = y_pred * gp_out.y_scaler
+ tts = t_new
+
+ logger.info(f"Data mode: {data_mode}")
+ logger.info("Creating new transient object with GP data")
+ if data_mode == 'flux_density':
+ new_transient = redback.transient.OpticalTransient(name=transient.name + '_gp',
+ flux_density=y_pred, flux_density_err=y_err,
+ time=tts, bands=bands, frequency=freqs,
+ data_mode=data_mode, redshift=transient.redshift)
+ elif data_mode == 'flux':
+ new_transient = redback.transient.OpticalTransient(name=transient.name + '_gp',
+ flux=y_pred, flux_err=y_err,
+ time=tts, bands=bands,
+ data_mode=data_mode, redshift=transient.redshift)
+ elif data_mode == 'magnitude':
+ new_transient = redback.transient.OpticalTransient(name=transient.name + '_gp',
+ magnitude=y_pred, magnitude_err=y_err,
+ time=tts, bands=bands,
+ data_mode=data_mode, redshift=transient.redshift)
+ elif data_mode == 'luminosity':
+ new_transient = redback.transient.OpticalTransient(name=transient.name + '_gp',
+ Lum50=y_pred, Lum50_err=y_err,
+ time_rest_frame=tts, data_mode=data_mode)
+ return new_transient
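
For orientation only (not part of the diff): a minimal sketch of how the new temperature/radius GP helper might be called. The `sn` transient object, the exact `estimate_bb_params` call, and the kernel choices are illustrative assumptions; only the `fit_temperature_and_radius_gp` signature and its `plot`/`fit_in_log` keywords come from the code above.

    import george
    import redback

    # 'sn' is assumed to be an existing redback transient object with photometry loaded.
    # estimate_bb_params is the method named in the docstring above; its signature is not shown in this diff.
    data = sn.estimate_bb_params()

    # Any george kernels can be passed in; these amplitudes and length-scales are placeholders.
    kernelT = 1.0e7 * george.kernels.ExpSquaredKernel(metric=10.0)
    kernelR = 1.0e28 * george.kernels.ExpSquaredKernel(metric=10.0)

    # With plot=True the function also returns the two-panel figure and axes.
    gp_T, gp_R, fig, axes = redback.analysis.fit_temperature_and_radius_gp(
        data, kernelT, kernelR, plot=True, fit_in_log=True)

plot_gp_lightcurves and generate_new_transient_data_from_gp follow the same pattern: both take the output of the fit_gp function referenced in the docstrings together with the original transient, and the latter returns a new OpticalTransient built from the GP predictions.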
redback/filters.py CHANGED
@@ -5,7 +5,7 @@ import numpy as np
  import redback
  import sncosmo
 
- def add_to_database(LABEL, WAVELENGTH, ZEROFLUX, DATABASE, PLOT_LABEL):
+ def add_to_database(LABEL, WAVELENGTH, ZEROFLUX, DATABASE, PLOT_LABEL, EFFECTIVE_WIDTH):
 
  """
  Add a filter to the Redback filter database.
@@ -15,8 +15,9 @@ def add_to_database(LABEL, WAVELENGTH, ZEROFLUX, DATABASE, PLOT_LABEL):
  """
 
  frequency = 3.0e8 / WAVELENGTH
-
- DATABASE.add_row([LABEL, frequency, WAVELENGTH*1e10, 'black', ZEROFLUX, LABEL, PLOT_LABEL])
+ effective_width = 3.0e8 / EFFECTIVE_WIDTH
+ print(effective_width)
+ DATABASE.add_row([LABEL, frequency, WAVELENGTH*1e10, 'black', ZEROFLUX, LABEL, PLOT_LABEL, effective_width])
 
  def add_to_sncosmo(LABEL, TRANSMISSION):
 
@@ -30,7 +31,7 @@ def add_to_sncosmo(LABEL, TRANSMISSION):
  band = sncosmo.Bandpass(TRANSMISSION['Wavelength'], TRANSMISSION['Transmission'], name=LABEL, wave_unit=u.angstrom)
  sncosmo.register(band, LABEL, force=True)
 
- def add_filter_svo(FILTER, LABEL, PLOT_LABEL=None):
+ def add_filter_svo(FILTER, LABEL, PLOT_LABEL=None, OVERWRITE=False):
 
  """
  Wrapper to add a filter from SVO to SNCosmo and the Redback filter database
@@ -44,9 +45,13 @@ def add_filter_svo(FILTER, LABEL, PLOT_LABEL=None):
 
  mask = np.where((database_filters['bands'] == LABEL) & (database_filters['sncosmo_name'] == LABEL))[0]
 
- # Only add filter to filter database if entry does not exist in the Redback database
+ # Only add filter to filter database if entry does not exist in the Redback database by default
 
- if len(mask) == 0:
+ # If no entry exists or you choose to overwrite an entry
+ if (len(mask) == 0) or ( (len(mask) != 0) & OVERWRITE ):
+
+ if len(mask) > 0:
+ database_filters.remove_rows(mask)
 
  # Reference (=pivot) wavelength, unit: AA
  wavelength_pivot = FILTER['WavelengthRef']
@@ -71,7 +76,7 @@ def add_filter_svo(FILTER, LABEL, PLOT_LABEL=None):
 
  plot_label = PLOT_LABEL if PLOT_LABEL != None else LABEL
 
- add_to_database(LABEL, wavelength_pivot * 1.0e-10, zeroflux, database_filters, plot_label)
+ add_to_database(LABEL, wavelength_pivot * 1.0e-10, zeroflux, database_filters, plot_label, effective_width)
 
  # Non-standard filters always needs to be re-added to SN Cosmo even if an entry exists in filter.csv
 
@@ -80,9 +85,10 @@ def add_filter_svo(FILTER, LABEL, PLOT_LABEL=None):
 
  # Prettify output
 
- database_filters['wavelength [Hz]'].info.format = '.03e'
- database_filters['wavelength [Angstrom]'].info.format = '.02f'
- database_filters['reference_flux'].info.format = '.03e'
+ database_filters['wavelength [Hz]'].info.format = '.05e'
+ database_filters['wavelength [Angstrom]'].info.format = '.05f'
+ database_filters['reference_flux'].info.format = '.05e'
+ database_filters['effective_width [Hz]'].info.format = '.05e'
 
  database_filters.write(redback_db_fname, overwrite=True, format='csv')
 
@@ -154,13 +160,14 @@ def add_filter_user(FILE, LABEL, PLOT_LABEL=None, OVERWRITE=False):
 
  print(LABEL, wavelength_pivot * 1.0e-10, zeroflux, plot_label)
 
- add_to_database(LABEL, wavelength_pivot * 1.0e-10, zeroflux, database_filters, plot_label)
+ add_to_database(LABEL, wavelength_pivot * 1.0e-10, zeroflux, database_filters, plot_label, effective_width)
 
  # Prettify output
 
- database_filters['wavelength [Hz]'].info.format = '.03e'
- database_filters['wavelength [Angstrom]'].info.format = '.02f'
- database_filters['reference_flux'].info.format = '.03e'
+ database_filters['wavelength [Hz]'].info.format = '.05e'
+ database_filters['wavelength [Angstrom]'].info.format = '.05f'
+ database_filters['reference_flux'].info.format = '.05e'
+ database_filters['effective_width [Hz]'].info.format = '.05e'
 
  database_filters.write(redback_db_fname, overwrite=True, format='csv')
 
@@ -168,7 +175,7 @@ def add_filter_user(FILE, LABEL, PLOT_LABEL=None, OVERWRITE=False):
 
  print('Filter {} already exists. Set OVERWRITE to True if you want to overwrite the existing entry'.format(LABEL))
 
- def add_common_filters():
+ def add_common_filters(overwrite=False):
 
  """
  Adds Euclid, NTT/EFOSC2, MPG/GROND, Spitzer and WISE filters from SVO
@@ -184,7 +191,7 @@ def add_common_filters():
  filter_label = ['grond::' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
  plot_label = ['GROND/' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
  print('done.\n')
 
@@ -199,7 +206,7 @@ def add_common_filters():
  filter_label = ['efosc2::' + x for x in filter_list['Band']]
  plot_label = ['EFOSC/' + x for x in filter_list['Band']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
  print('done.\n')
 
@@ -211,7 +218,7 @@ def add_common_filters():
  filter_label = ['euclid::' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
  plot_label = ['EUCLID/' + x.split('/')[1].split('.')[1].upper() for x in filter_list['filterID']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
 
  filter_list = SvoFps.get_filter_list(facility='Euclid', instrument='NISP')
@@ -220,7 +227,7 @@ def add_common_filters():
  filter_label = ['euclid::' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
  plot_label = ['EUCLID/' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
  print('done.\n')
 
@@ -232,7 +239,7 @@ def add_common_filters():
  filter_label = ['irac::' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
  plot_label = ['IRAC/' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
  print('done.\n')
 
@@ -244,13 +251,40 @@ def add_common_filters():
  filter_label = ['wise::' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
  plot_label = ['WISE/' + x.split('/')[1].split('.')[1] for x in filter_list['filterID']]
 
- [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii]) for ii in range(len(filter_list))]
+ [add_filter_svo(filter_list[ii], filter_label[ii], plot_label[ii], OVERWRITE=overwrite) for ii in range(len(filter_list))]
 
  print('done.\n')
 
  def show_all_filters():
 
- redback_db_fname = path = redback.__path__[0] + '/tables/filters.csv'
+ redback_db_fname = redback.__path__[0] + '/tables/filters.csv'
  database_filters = ascii.read(redback_db_fname)
 
- return database_filters
+ return database_filters
+
+ def add_effective_widths():
+ """
+ Adds effective widths to the Redback filter database
+ :return: None
+ """
+ import pandas as pd
+ db = pd.read_csv(redback.__path__[0] + '/tables/filters.csv')
+ import sncosmo
+ eff_width = np.zeros(len(db))
+ for ii, bb in enumerate(db['sncosmo_name']):
+ try:
+ band = sncosmo.get_bandpass(bb)
+ waves = band.wave # wavelengths in Angstroms
+ trans = band.trans # corresponding transmission values
+
+ # Calculate the effective width:
+ # effective_width = ∫T(λ) dλ / max(T(λ))
+ effective_width = np.trapz(trans, waves) / np.max(trans)
+ effective_width = effective_width * u.Angstrom
+ eff_width[ii] = effective_width.to(u.Hz, equivalencies=u.spectral()).value
+ except Exception:
+ redback.utils.logger.warning("Failed for band={} at index={}".format(bb, ii))
+ eff_width[ii] = db['wavelength [Hz]'].iloc[ii]
+
+ db['effective_width [Hz]'] = eff_width
+ db.to_csv(redback.__path__[0] + '/tables/filters.csv', index=False)
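
A brief usage sketch of the new overwrite behaviour (illustrative only, not part of the diff; the SVO query mirrors the calls inside add_common_filters above, and the 'wise::W1' label is a placeholder):

    from astroquery.svo_fps import SvoFps
    import redback.filters as filters

    # Re-register the bundled SVO filters, replacing any existing database entries.
    filters.add_common_filters(overwrite=True)

    # Or add/overwrite a single SVO filter by hand.
    filter_list = SvoFps.get_filter_list(facility='WISE')
    filters.add_filter_svo(filter_list[0], 'wise::W1', 'WISE/W1', OVERWRITE=True)

    # Populate the new effective_width [Hz] column for every entry in filters.csv.
    filters.add_effective_widths()
    print(filters.show_all_filters())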
redback/get_data/directory.py CHANGED
@@ -12,6 +12,24 @@ SWIFT_PROMPT_BIN_SIZES = ['1s', '2ms', '8ms', '16ms', '64ms', '256ms']
  DirectoryStructure = namedtuple("DirectoryStructure", ['directory_path', 'raw_file_path', 'processed_file_path'])
 
 
+ def spectrum_directory_structure(transient: str) -> DirectoryStructure:
+ """Provides directory structure for any spectrum data.
+
+ :param transient: Name of the GRB, e.g. GRB123456.
+ :type transient: str
+
+ :return: The directory structure, with 'directory_path', 'raw_file_path', and 'processed_file_path'
+ :rtype: namedtuple
+ """
+ directory_path = f'spectrum/'
+ check_directory_exists_and_if_not_mkdir(directory_path)
+
+ raw_file_path = f"{directory_path}{transient}_rawdata.csv"
+ processed_file_path = f"{directory_path}{transient}.csv"
+
+ return DirectoryStructure(
+ directory_path=directory_path, raw_file_path=raw_file_path, processed_file_path=processed_file_path)
+
  def afterglow_directory_structure(grb: str, data_mode: str, instrument: str = 'BAT+XRT') -> DirectoryStructure:
  """Provides directory structure for Swift afterglow data.
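
For reference (not part of the diff), the new helper simply builds paths under a local spectrum/ directory; a minimal sketch with an illustrative transient name:

    from redback.get_data.directory import spectrum_directory_structure

    structure = spectrum_directory_structure(transient='GRB123456')
    # structure.directory_path      -> 'spectrum/'
    # structure.raw_file_path       -> 'spectrum/GRB123456_rawdata.csv'
    # structure.processed_file_path -> 'spectrum/GRB123456.csv'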